Repository: ngageoint/elasticgeo Branch: master Commit: 91c2a99ccea6 Files: 99 Total size: 707.0 KB Directory structure: gitextract_d1ek5of1/ ├── .gitignore ├── .travis.yml ├── LICENSE ├── README.md ├── gs-web-elasticsearch/ │ ├── LICENSE │ ├── doc/ │ │ └── index.rst │ ├── pom.xml │ └── src/ │ ├── assembly/ │ │ └── dist.xml │ └── main/ │ ├── java/ │ │ ├── applicationContext.xml │ │ └── mil/ │ │ └── nga/ │ │ └── giat/ │ │ └── elasticsearch/ │ │ ├── ElasticAttributeProvider.java │ │ ├── ElasticConfigurationPage.html │ │ ├── ElasticConfigurationPage.java │ │ ├── ElasticConfigurationPanel.html │ │ ├── ElasticConfigurationPanel.java │ │ ├── ElasticConfigurationPanelInfo.java │ │ ├── ElasticFeatureTypeCallback.java │ │ ├── ElasticXStreamInitializer.java │ │ └── ElasticXStreamPersisterInitializer.java │ └── resources/ │ └── GeoServerApplication.properties ├── gt-elasticsearch/ │ ├── LGPL │ ├── LICENSE │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── mil/ │ │ │ └── nga/ │ │ │ └── giat/ │ │ │ └── data/ │ │ │ └── elasticsearch/ │ │ │ ├── ElasticAggregation.java │ │ │ ├── ElasticAttribute.java │ │ │ ├── ElasticCapabilities.java │ │ │ ├── ElasticClient.java │ │ │ ├── ElasticConstants.java │ │ │ ├── ElasticDataStore.java │ │ │ ├── ElasticDataStoreFactory.java │ │ │ ├── ElasticFeatureReader.java │ │ │ ├── ElasticFeatureReaderScroll.java │ │ │ ├── ElasticFeatureSource.java │ │ │ ├── ElasticFeatureTypeBuilder.java │ │ │ ├── ElasticHit.java │ │ │ ├── ElasticLayerConfiguration.java │ │ │ ├── ElasticMappings.java │ │ │ ├── ElasticParserUtil.java │ │ │ ├── ElasticRequest.java │ │ │ ├── ElasticResponse.java │ │ │ ├── ElasticResults.java │ │ │ ├── FilterToElastic.java │ │ │ ├── FilterToElasticException.java │ │ │ ├── FilterToElasticHelper.java │ │ │ ├── GeohashUtil.java │ │ │ ├── RestElasticClient.java │ │ │ └── TotalDeserializer.java │ │ └── resources/ │ │ └── META-INF/ │ │ └── services/ │ │ └── org.geotools.data.DataStoreFactorySpi │ └── test/ │ ├── java/ │ │ └── mil/ │ │ 
└── nga/ │ │ └── giat/ │ │ └── data/ │ │ └── elasticsearch/ │ │ ├── ElasticAggregationReaderTest.java │ │ ├── ElasticAttributeTest.java │ │ ├── ElasticDataStoreFinderIT.java │ │ ├── ElasticDataStoreIT.java │ │ ├── ElasticDatastoreFactoryTest.java │ │ ├── ElasticFeatureFilterIT.java │ │ ├── ElasticFilterTest.java │ │ ├── ElasticGeometryFilterIT.java │ │ ├── ElasticParserUtilTest.java │ │ ├── ElasticResponseTest.java │ │ ├── ElasticTemporalFilterIT.java │ │ ├── ElasticTestSupport.java │ │ ├── ElasticViewParametersFilterIT.java │ │ ├── GeohashUtilTest.java │ │ ├── RandomGeometryBuilder.java │ │ └── RestElasticClientTest.java │ └── resources/ │ ├── README.md │ ├── active_mappings.json │ ├── active_mappings_legacy.json │ ├── active_mappings_ng.json │ ├── log4j.properties │ ├── logging.properties │ ├── mockito-extensions/ │ │ └── org.mockito.plugins.MockMaker │ ├── requirements.txt │ ├── test_index.py │ └── wifiAccessPoint.json ├── gt-elasticsearch-process/ │ ├── LGPL │ ├── LICENSE │ ├── pom.xml │ └── src/ │ ├── main/ │ │ ├── java/ │ │ │ └── mil/ │ │ │ └── nga/ │ │ │ └── giat/ │ │ │ └── process/ │ │ │ └── elasticsearch/ │ │ │ ├── BBOXRemovingFilterVisitor.java │ │ │ ├── BasicGeoHashGrid.java │ │ │ ├── GeoHashGrid.java │ │ │ ├── GeoHashGridProcess.java │ │ │ ├── GridCell.java │ │ │ ├── GridCoverageUtil.java │ │ │ ├── MetricGeoHashGrid.java │ │ │ ├── NestedAggGeoHashGrid.java │ │ │ └── RasterScale.java │ │ └── resources/ │ │ └── META-INF/ │ │ └── services/ │ │ ├── org.geotools.process.ProcessFactory │ │ └── org.geotools.process.vector.VectorProcess │ └── test/ │ └── java/ │ └── mil/ │ └── nga/ │ └── giat/ │ └── process/ │ └── elasticsearch/ │ ├── GeoHashGridProcessTest.java │ ├── GeoHashGridTest.java │ ├── GridCoverageUtilTest.java │ ├── MetricGeoHashGridTest.java │ ├── NestedAggGeoHashGridTest.java │ ├── RasterScaleTest.java │ └── TestUtil.java ├── joda-shaded/ │ ├── LICENSE.txt │ ├── NOTICE.txt │ ├── pom.xml │ └── src/ │ └── main/ │ └── java/ │ └── org/ │ └── 
elasticsearch/ │ └── common/ │ └── Strings.java └── pom.xml ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitignore ================================================ **target/ **.settings/ **.project **.classpath *.prefs *.log *.iml **idea/ ================================================ FILE: .travis.yml ================================================ sudo: true services: - docker language: java jdk: - openjdk8 env: matrix: - GEOTOOLS_VERSION='22.0' GEOSERVER_VERSION='2.16.0' ES_VERSION='7.4.0' - GEOTOOLS_VERSION='22.0' GEOSERVER_VERSION='2.16.0' ES_VERSION='6.8.3' - GEOTOOLS_VERSION='22.0' GEOSERVER_VERSION='2.16.0' ES_VERSION='5.6.16' - GEOTOOLS_VERSION='22.0' GEOSERVER_VERSION='2.16.0' ES_VERSION='2.4.5' ARGS='-Ddocker.image=elasticsearch' - GEOTOOLS_VERSION='21.3' GEOSERVER_VERSION='2.15.3' ES_VERSION='7.4.0' - GEOTOOLS_VERSION='21.3' GEOSERVER_VERSION='2.15.3' ES_VERSION='6.8.3' - GEOTOOLS_VERSION='21.3' GEOSERVER_VERSION='2.15.3' ES_VERSION='5.6.16' - GEOTOOLS_VERSION='21.3' GEOSERVER_VERSION='2.15.3' ES_VERSION='2.4.5' ARGS='-Ddocker.image=elasticsearch' cache: directories: - "$HOME/.m2" install: - mvn --version - travis_retry mvn install -DskipTests=true -Dskip.integration.tests=true -B -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION} -Des.test.version=${ES_VERSION} ${ARGS} script: - sudo sysctl -w vm.max_map_count=262144 - travis_retry mvn verify -Dgeotools.version=${GEOTOOLS_VERSION} -Dgeoserver.version=${GEOSERVER_VERSION} -Des.test.version=${ES_VERSION} ${ARGS} after_success: - mvn coveralls:report before_deploy: - mvn assembly:assembly -pl gs-web-elasticsearch - export PLUGIN_FILE="$(ls gs-web-elasticsearch/target/elasticgeo*.zip)"; - echo "Deploying $PLUGIN_FILE to GitHub releases" deploy: provider: releases api_key: secure: 
UiMZusRpTSbs3BQWZxououT1VPj8LzUN4UYH8H+PcPdEEZfx/lDNCxlaj1YG8eVCflJL1TItPdU8jIEU435weP4v3cnSyUjq/3Oc4ey9CK/iuphoqvkqfCgRxpVsj9wuZkJqLobi9+9JFLz6NziCnX9ME5WxRgMPlfRU2jot/GM= file_glob: true skip_cleanup: true file: "$PLUGIN_FILE" on: tags: true ================================================ FILE: LICENSE ================================================ GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. 
For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. 
Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. 
"Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. 
Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. 
When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. 
This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. 
But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. 
If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. 
If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. {one line to give the program's name and a brief idea of what it does.} Copyright (C) {year} {name of author} This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: {project} Copyright (C) {year} {fullname} This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 
This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <http://www.gnu.org/licenses/>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <http://www.gnu.org/philosophy/why-not-lgpl.html>. ================================================ FILE: README.md ================================================ # ElasticGeo Travis-CI test status
Coverage Status ElasticGeo provides a GeoTools data store that allows geospatial features from an Elasticsearch index to be published via OGC services using GeoServer. ### Pull Request If you'd like to contribute to this project, please make a pull request. We'll review the pull request and discuss the changes. All pull request contributions to this project will be released under the appropriate license conditions discussed below. Software source code previously released under an open source license and then modified by NGA staff is considered a "joint work" (see 17 USC 101); it is partially copyrighted, partially public domain, and as a whole is protected by the copyrights of the non-government authors and must be released according to the terms of the original open source license. ### Project relies upon: GeoTools under [LGPL v 2.1](http://geotools.org/about.html) GeoServer under [GPL v 2 with later option](http://geoserver.org/license/) ElasticGeo under [LGPL v 2.1](https://github.com/matsjg/elasticgeo) Elasticsearch under [Apache License v 2.0](https://github.com/elastic/elasticsearch/blob/master/LICENSE.txt) ### Documentation [Read more](gs-web-elasticsearch/doc/index.rst) ================================================ FILE: gs-web-elasticsearch/LICENSE ================================================ GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. 
We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. 
The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. 
Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. 
The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. 
Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. 
You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. 
You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. 
Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. 
If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. 
For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. 
You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. 
SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. 
{one line to give the program's name and a brief idea of what it does.} Copyright (C) {year} {name of author} This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: {project} Copyright (C) {year} {fullname} This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <https://www.gnu.org/licenses/why-not-lgpl.html>. 
================================================ FILE: gs-web-elasticsearch/doc/index.rst ================================================ Elasticsearch GeoServer Data Store ================================== Elasticsearch is a popular distributed search and analytics engine that enables complex search features in near real-time. Default field type mappings support string, numeric, boolean and date types and allow complex, hierarchical documents. Custom field type mappings can be defined for geospatial document fields. The ``geo_point`` type supports point geometries that can be specified through a coordinate string, geohash or coordinate array. The ``geo_shape`` type supports Point, LineString, Polygon, MultiPoint, MultiLineString, MultiPolygon and GeometryCollection GeoJSON types as well as envelope and circle types. Custom options allow configuration of the type and precision of the spatial index. This data store allows features from an Elasticsearch index to be published through GeoServer. Both ``geo_point`` and ``geo_shape`` type mappings are supported. OGC filters are converted to Elasticsearch queries and can be combined with native Elasticsearch queries in WMS and WFS requests. .. contents:: Contents: Compatibility ------------- * Java: 1.8 * GeoServer: 2.16.x * Elasticsearch: 2.4.x, 5.x, 6.x, 7.x Downloads --------- Pre-compiled binaries can be found on the `GitHub releases page <https://github.com/ngageoint/elasticgeo/releases>`_. Installation ------------ Pre-compiled binaries ^^^^^^^^^^^^^^^^^^^^^ Unpack zipfile and copy plugin file to the ``WEB-INF/lib`` directory of your GeoServer installation and then restart GeoServer. 
Building ^^^^^^^^ Clone project:: $ git clone git@github.com:ngageoint/elasticgeo.git Build and install plugin (requires GeoServer restart):: $ mvn clean install -DskipTests=true -Dskip.integration.tests=true $ cp gs-web-elasticsearch/target/elasticgeo*.jar GEOSERVER_HOME/WEB-INF/lib Run default tests:: $ mvn verify -Dskip.integration.tests=true Run default and integration tests (requires `Docker <https://www.docker.com/>`_):: $ mvn verify Note running integration tests in an IDE development environment requires that a local Elasticsearch instance is running and accepting HTTP connections over port 9200 (see `Elasticsearch documentation <https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html>`_). Configuration ------------- Configuring data store ^^^^^^^^^^^^^^^^^^^^^^ Once the Elasticsearch GeoServer extension is installed, ``Elasticsearch index`` will be an available vector data source format when creating a new data store. .. |new_store| image:: images/elasticsearch_store.png :scale: 100% :align: middle +-------------+ | |new_store| | +-------------+ .. _config_elasticsearch: The Elasticsearch data store configuration panel includes connection parameters and search settings. .. |store_config| raw:: html +----------------+ | |store_config| | +----------------+ Available data store configuration parameters are summarized in the following table: .. list-table:: :widths: 20 80 * - Parameter - Description * - elasticsearch_host - Host (IP) for connecting to Elasticsearch. HTTP scheme and port can optionally be included to override the defaults. Multiple hosts can be provided. Examples:: localhost localhost:9200 http://localhost http://localhost:9200 https://localhost:9200 https://somehost.somedomain:9200,https://anotherhost.somedomain:9200 * - elasticsearch_port - Default HTTP port for connecting to Elasticsearch. Ignored if the hostname includes the port. * - user - Elasticsearch user. Must have superuser privilege on index. 
* - passwd - Elasticsearch user password * - runas_geoserver_user - Whether to submit requests on behalf of the authenticated GeoServer user * - proxy_user - Elasticsearch user for document queries. If not provided then admin user credentials are used for all requests. * - proxy_passwd - Elasticsearch proxy user password * - index_name - Index name or alias (wildcards supported) * - reject_unauthorized - Whether to validate the server certificate during the SSL handshake for https connections * - default_max_features - Default used when maxFeatures is unlimited * - source_filtering_enabled - Whether to enable filtering of the _source field * - scroll_enabled - Enable the Elasticsearch scan and scroll API * - scroll_size - Number of documents per shard when using the scroll API * - scroll_time - Search context timeout when using the scroll API * - array_encoding - Array encoding strategy. Allowed values are ``JSON`` (keep arrays) and ``CSV`` (keep first array element). * - grid_size - Hint for Geohash grid size (numRows*numCols) * - grid_threshold - Geohash grid aggregation precision will be the minimum necessary so that actual_grid_size/grid_size > grid_threshold Configuring authentication ~~~~~~~~~~~~~~~~~~~~~~~~~~ Basic authentication is supported through the ``user`` and ``passwd`` credential parameters. The provided user must have superuser privilege on the index to enable the mapping and alias requests performed during store initialization. Optional ``proxy_user`` and ``proxy_passwd`` parameters can be used to specify an alternate user for document search (OGC service) requests. The proxy user can have restricted privileges on the index through document level security. If not provided the default user is used for all requests. The ``runas_geoserver_user`` flag can be used to enable Elasticsearch requests to be submitted on behalf of the authenticated GeoServer user. 
When the run-as mechanism is configured the plugin will add the ``es-security-runas-user`` header with the authenticated GeoServer username. See `X-Pack run-as documentation `_ for more information. Note the run-as mechanism is applied only to document search requests. For added security it is recommended to define ``proxy_user`` and ``proxy_passwd`` when using the run-as mechanism. The proxy user will be used when submitting requests on behalf of the GeoServer user and can have restricted privileges enabling access only to documents that all users can have access to. The plugin can optionally be deployed to require user credentials and proxy credentials and to force the use of ``runas_geoserver_user`` by setting the environment variable ``org.geoserver.elasticsearch.xpack.force-runas``:: $ export JAVA_OPTS="-Dorg.geoserver.elasticsearch.xpack.force-runas $JAVA_OPTS" Configuring HTTPS/SSL ~~~~~~~~~~~~~~~~~~~~~ System properties are supported for SSL/TLS configuration:: javax.net.ssl.trustStore javax.net.ssl.trustStorePassword javax.net.ssl.keyStore javax.net.ssl.keyStorePassword See `HttpClientBuilder `_ documentation for available properties. For example use ``javax.net.ssl.trustStore[Password]`` to validate server certificate:: $ export JAVA_OPTS="-Djavax.net.ssl.trustStore=/path/to/truststore.jks -Djavax.net.ssl.trustStorePassword=changeme $JAVA_OPTS " Configuring layer ^^^^^^^^^^^^^^^^^ The initial layer configuration panel for an Elasticsearch layer will include an additional pop-up showing a table of available fields. .. |field_list| image:: images/elasticsearch_fieldlist.png :scale: 100% :align: middle +--------------+ | |field_list| | +--------------+ .. list-table:: :widths: 20 80 * - Item - Description * - ``Use All`` - Use all fields in the layer feature type * - ``Short Names`` - For hierarchical documents with inner fields (e.g. ``parent.child.field_name``), only use the base name (``field_name``) in the schema. 
Note, full path will always be included when the base name is duplicated across fields. * - ``Use`` - Used to select the fields that will make up the layer feature type * - ``Name`` - Name of the field * - ``Type`` - Type of the field, as derived from the Elasticsearch schema. For geometry types, you have the option to provide a more specific data type. * - ``Order`` - Integer order values are used to sort fields, where fields with smaller order are returned first * - ``Custom Name`` - Provides the option to give the field a custom name * - ``Default Geometry`` - Indicates if the geometry field is the default one. Useful if the documents contain more than one geometry field, as SLDs and spatial filters will hit the default geometry field unless otherwise specified * - ``Stored`` - Indicates whether the field is stored in the index * - ``Analyzed`` - Indicates whether the field is analyzed * - ``SRID`` - Native spatial reference ID of the geometries. Currently only EPSG:4326 is supported. * - ``Date Format`` - Date format used for parsing field values and printing filter elements To return to the field table after it has been closed, click the "Configure Elasticsearch fields" button below the "Feature Type Details" panel on the layer configuration page. .. |field_list_edit| image:: images/elasticsearch_fieldlist_edit.png :scale: 100% :align: middle +-------------------+ | |field_list_edit| | +-------------------+ Configuring logging ^^^^^^^^^^^^^^^^^^^ Logging is configurable through Log4j. The data store includes logging such as the query object being sent to Elasticsearch, which is logged at a lower level than may be enabled by default. To enable these logs, add the following lines to the GeoServer logging configuration file (see GeoServer Global Settings):: log4j.category.mil.nga.giat.data.elasticsearch=DEBUG log4j.category.mil.nga.giat.process.elasticsearch=DEBUG The logging configuration file will be in the ``logs`` subdirectory in the GeoServer data directory. 
Check GeoServer global settings for which file is being used (e.g. ``DEFAULT_LOGGING.properties``, etc.). .. |logging| image:: images/elasticsearch_logging.png :scale: 100% :align: middle +-----------+ | |logging| | +-----------+ Filtering --------- Filtering capabilities include OpenGIS simple comparisons, temporal comparisons, as well as other common filter comparisons. Elasticsearch natively supports numerous spatial filter operators, depending on the type: - ``geo_shape`` types natively support BBOX/Intersects, Within and Disjoint binary spatial operators - ``geo_point`` types natively support BBOX and Within binary spatial operators, as well as the DWithin and Beyond distance buffer operators Requests involving spatial filter operators not natively supported by Elasticsearch will include an additional filtering operation on the results returned from the query, which may impact performance. Native queries ^^^^^^^^^^^^^^ Native Elasticsearch queries can be applied in WFS/WMS feature requests by including the ``q:{query_body}`` key:value pair in the ``viewparams`` parameter (see GeoServer SQL Views documentation for more information). If supplied, the query is combined with the query derived from the request bbox, CQL or OGC filter using the AND logical binary operator. 
Examples ^^^^^^^^ BBOX and CQL filter:: http://localhost:8080/geoserver/test/wms?service=WMS&version=1.1.0&request=GetMap &layers=test:active&styles=&bbox=-1,-1,10,10&width=279&height=512 &srs=EPSG:4326&format=application/openlayers&maxFeatures=1000 &cql_filter=standard_ss='IEEE 802.11b' BBOX and native query:: http://localhost:8080/geoserver/test/wms?service=WMS&version=1.1.0&request=GetMap &layers=test:active&styles=&bbox=-1,-1,10,10&width=279&height=512 &srs=EPSG:4326&format=application/openlayers&maxFeatures=1000 &viewparams=q:{"term":{"standard_ss":"IEEE 802.11b"}} Native query with BBOX filter:: http://localhost:8080/geoserver/test/wms?service=WMS&version=1.1.0&request=GetMap &layers=test:active&styles=&bbox=-1,-1,10,10&width=279&height=512 &srs=EPSG:4326&format=application/openlayers&maxFeatures=1000 &viewparams=q:{"term":{"standard_ss":"IEEE 802.11b"}} Note that commas in native queries must be escaped with a backslash. Aggregations ------------ Elasticsearch aggregations are supported through WFS/WMS requests by including the ``a:{aggregation_body}`` key:value pair in the ``viewparams`` parameter (see GeoServer SQL Views documentation for more information):: http://localhost:8080/geoserver/test/ows?service=WFS&version=1.0.0&request=GetFeature &typeName=test:active&bbox=0.0,0.0,24.0,44.0 &viewparams=a:{"agg": {"geohash_grid": {"field": "geo"\, "precision": 3}}} Aggregation WFS features will include a single attribute, ``_aggregation``, containing the raw aggregation content. Note that size is set to zero when an aggregation is supplied so only aggregation features are returned (e.g. maxFeatures is ignored and there will be no search hit results). See FAQ_ for common issues using aggregations. Geohash grid aggregations ^^^^^^^^^^^^^^^^^^^^^^^^^ Geohash grid aggregation support includes dynamic precision updating and a custom rendering transformation for visualization. 
Geohash grid aggregation precision is updated dynamically to approximate the specified ``grid_size`` based on current bbox extent and the additional ``grid_threshold`` parameter as described above. If a ``precision`` value is present in the aggregation defined in ``viewparams``, however, that value will be used instead of the dynamic value. Geohash grid aggregation visualization is supported in WMS requests through a custom rendering transformation, ``vec:GeoHashGrid``, which translates aggregation response data into a raster for display. By default raster values correspond to the aggregation bucket ``doc_count``. The following shows an example GeoServer style that uses the GeoHashGrid rendering transformation:: GeoHashGrid GeoHashGrid GeoHashGrid aggregation data gridStrategy Basic pixelsPerCell 1 outputBBOX wms_bbox outputWidth wms_width outputHeight wms_height geo 0.6 Example WMS request including Geohash grid aggregation with the above custom style:: http://localhost:8080/geoserver/test/wms?service=WMS&version=1.1.0&request=GetMap &layers=test:active&styles=geohashgrid&bbox=0.0,0.0,24.0,44.0&srs=EPSG:4326 &width=418&height=768&format=application/openlayers &viewparams=a:{"agg": {"geohash_grid": {"field": "geo"\, "precision": 3}}} Grid Strategy ^^^^^^^^^^^^^ ``gridStrategy``: Parameter to identify the ``mil.nga.giat.process.elasticsearch.GeoHashGrid`` implementation that will be used to convert each geohashgrid bucket into a raster value (number). .. list-table:: :widths: 20 20 20 40 * - Name - gridStrategy - gridStrategyArgs - Description * - Basic - ``basic`` - no - Raster value is geohashgrid bucket ``doc_count``. * - Metric - ``metric`` - yes - Raster value is geohashgrid bucket metric value. * - Nested - ``nested_agg`` - yes - Extract raster value from nested aggregation results. ``gridStrategyArgs``: (Optional) Parameter used to specify an optional argument list for the grid strategy. 
``emptyCellValue``: (Optional) Parameter used to specify the value for empty grid cells. By default, empty grid cells are set to ``0``. ``scaleMin``, ``scaleMax``: (Optional) Parameters used to specify a scale applied to all raster values. Each tile request is scaled according to the min and max values for that tile. It is best to use a non-tiled layer with this parameter to avoid confusing results. ``useLog``: (Optional) Flag indicating whether to apply logarithm to raster values (applied prior to scaling, if applicable) Basic ~~~~~ Raster value is geohashgrid bucket ``doc_count``. Example Aggregation:: { "agg": { "geohash_grid": { "field": "geo" } } } Example bucket:: { "key" : "xv", "doc_count" : 1 } Extracted raster value: ``1`` Metric ~~~~~~ Raster value is geohashgrid bucket metric value. .. list-table:: :widths: 20 20 60 * - Argument Index - Default Value - Description * - 0 - ``metric`` - Key used to pluck metric object from top level bucket. Empty string results in plucking doc_count. * - 1 - ``value`` - Key used to pluck the value from the metric object. Example Aggregation:: { "agg": { "geohash_grid": { "field": "geo" }, "aggs": { "metric": { "max": { "field": "magnitude" } } } } } Example bucket:: { "key" : "xv", "doc_count" : 1, "metric" : { "value" : 4.9 } } Extracted raster value: ``4.9`` Nested ~~~~~~~~~~ Extract raster value from nested aggregation results. .. list-table:: :widths: 20 20 60 * - Argument Index - Default Value - Description * - 0 - ``nested`` - Key used to pluck nested aggregation results from the geogrid bucket. * - 1 - empty string - Key used to pluck metric object from each nested aggregation bucket. Empty string results in plucking doc_count. * - 2 - ``value`` - Key used to pluck the value from the metric object. * - 3 - ``largest`` - ``largest`` | ``smallest``. Strategy used to select a bucket from the nested aggregation buckets. The grid cell raster value is extracted from the selected bucket. 
* - 4 - ``value`` - ``key`` | ``value``. Strategy used to extract the raster value from the selected bucket. ``value``: Raster value is the selected bucket's metric value. ``key``: Raster value is the selected bucket's key. * - 5 - null - (Optional) Map used to convert String keys into numeric values. Use the format ``key1:1;key2:2``. Only utilized when raster strategy is ``key``. Example Aggregation:: { "agg": { "geohash_grid": { "field": "geo" }, "aggs": { "nested": { "histogram": { "field": "magnitude", "interval": 1, "min_doc_count": 1 } } } } } Example Parameters:: gridStrategyArgs nested largest key Example bucket:: { "key" : "xv", "doc_count" : 1729, "nested" : { "buckets" : [ { "key" : 2.0, "doc_count" : 5 }, { "key" : 3.0, "doc_count" : 107 }, { "key" : 4.0, "doc_count" : 1506 }, { "key" : 5.0, "doc_count" : 100 }, { "key" : 6.0, "doc_count" : 11 } ] } } Extracted raster value: ``4.0`` Implementing a custom Grid Strategy ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ By default the raster values computed in the geohash grid aggregation rendering transformation correspond to the top level ``doc_count``. Adding an additional strategy for computing the raster values from bucket data currently requires source code updates to the ``gt-elasticsearch-process`` module as described below. First create a custom implementation of ``mil.nga.giat.process.elasticsearch.GeoHashGrid`` and provide an implementation of the ``computeCellValue`` method, which takes the raw bucket data and returns the raster value. For example the default basic implementation simply returns the doc_count:: public class BasicGeoHashGrid extends GeoHashGrid { @Override public Number computeCellValue(Map bucket) { return (Number) bucket.get("doc_count"); } } Then update ``mil.nga.giat.process.elasticsearch.GeoHashGridProcess`` and add a new entry to the Strategy enum to point to the custom implementation. 
After deploying the customized plugin the new geohash grid computer can be used by updating the ``gridStrategy`` parameter in the GeoServer style:: ... gridStrategy NewName .. _FAQ: FAQ --- - By default arrays are returned directly, which is suitable for many output formats including GeoJSON. When using CSV output format with layers containing arrays it's necessary to set the ``array_encoding`` store parameter to ``CSV``. Note however when using the ``CSV`` array encoding that only the first value will be returned. - When updating from pre-2.11.0 versions of the plugin it may be necessary to reload older layers to enable full aggregation and time support. Missing aggregation data or errors of the form ``IllegalArgumentException: Illegal pattern component`` indicate a layer reload is necessary. In this case the layer must be removed and re-added to GeoServer (e.g. a feature type reload will not be sufficient). - Commas in the native query and aggregation body must be escaped with a backslash. Additionally body may need to be URL encoded. - Geometry property name in the aggregation SLD RasterSymbolizer must be a valid geometry property in the layer - ``PropertyIsEqualTo`` maps to an Elasticsearch term query, which will return documents that contain the supplied term. When searching on an analyzed string field, ensure that the search values are consistent with the analyzer used in the index. For example, values may need to be lowercase when querying fields analyzed with the default analyzer. See the Elasticsearch term query documentation for more information. - ``PropertyIsLike`` maps to either a query string query or a regexp query, depending on whether the field is analyzed or not. Reserved characters should be escaped as applicable. Note case sensitive and insensitive searches may not be supported for analyzed and not analyzed fields, respectively. See Elasticsearch query string and regexp query documentation for more information. 
- Date conversions are handled using the date format from the associated type mapping, or ``date_optional_time`` if not found. Note that UTC timezone is used for both parsing and printing of dates. - Filtering on Elasticsearch ``object`` types is supported. By default, field names will include the full path to the field (e.g. "parent.child.field_name"), but this can be changed in the GeoServer layer configuration. - When referencing fields with path elements using ``cql_filter``, it may be necessary to quote the name (e.g. ``cql_filter="parent.child.field_name"='value'``) - Filtering on Elasticsearch ``nested`` types is supported only for non-geospatial fields. - Circle geometries are approximate and may not be fully consistent with the implementation in Elasticsearch, especially at extreme latitudes (see `#86 `_). - The ``joda-shaded`` module may need to be excluded when importing the project into Eclipse. Otherwise modules may have build errors of the form ``DateTimeFormatter cannot be resolved to a type``. 
================================================ FILE: gs-web-elasticsearch/pom.xml ================================================ 4.0.0 elasticgeo mil.nga.giat 2.16-SNAPSHOT gs-web-elasticsearch 2.16-SNAPSHOT jar GeoServer Elasticsearch Module mil.nga.giat gt-elasticsearch ${project.version} mil.nga.giat gt-elasticsearch-process ${project.version} org.geoserver gs-main ${geoserver.version} provided org.geoserver.web gs-web-core ${geoserver.version} provided org.geotools gt-main ${geotools.version} provided org.geoserver.web gs-web-core ${geoserver.version} test-jar test org.geoserver gs-main ${geoserver.version} test-jar test com.mockrunner mockrunner 0.3.6 test junit junit 4.11 test ${basedir}/src/main/java applicationContext.xml **/*.html ${basedir}/src/main/resources **/* maven-assembly-plugin 2.4 src/assembly/dist.xml false elasticgeo-${project.version} org.apache.maven.plugins maven-shade-plugin 2.2 package shade *:pom:* *:maven*:* commons-codec commons-collections commons-httpclient commons-logging *:guava:* javax.media:jai*:* *:jts:* *:*:jsr305 log4j:log4j *:* META-INF/maven/** META-INF/*.SF META-INF/*.DSA META-INF/*.RSA META-INF/DEPENDENCIES META-INF/LICENSE LICENSE false false false elasticgeo-${project.version} ================================================ FILE: gs-web-elasticsearch/src/assembly/dist.xml ================================================ bin zip false ${project.build.directory} / elasticgeo*.jar . / LICENSE ================================================ FILE: gs-web-elasticsearch/src/main/java/applicationContext.xml ================================================ Elasticsearch ================================================ FILE: gs-web-elasticsearch/src/main/java/mil/nga/giat/elasticsearch/ElasticAttributeProvider.java ================================================ /* (c) 2014 Open Source Geospatial Foundation - all rights reserved * This code is licensed under the GPL 2.0 license, available at the root * application directory. 
*/ package mil.nga.giat.elasticsearch; import java.util.Arrays; import java.util.List; import mil.nga.giat.data.elasticsearch.ElasticAttribute; import org.geoserver.web.wicket.GeoServerDataProvider; /** * * Provide attributes from Elasticsearch fields. * */ class ElasticAttributeProvider extends GeoServerDataProvider { private static final long serialVersionUID = -1021780286733349153L; private final List attributes; /** * Name of field */ static final Property NAME = new BeanProperty<>("name", "displayName"); /** * Class type of field */ static final Property TYPE = new AbstractProperty( "type") { private static final long serialVersionUID = 4454312983828267130L; @Override public Object getPropertyValue(ElasticAttribute item) { if (item.getType() != null) { return item.getType().getSimpleName(); } return null; } }; /** * Mark as the default geometry */ static final Property DEFAULT_GEOMETRY = new BeanProperty<>( "defaultGeometry", "defaultGeometry"); /** * SRID of geometric field */ static final Property SRID = new BeanProperty<>("srid", "srid"); /** * Use field in datastore */ static final Property USE = new BeanProperty<>("use", "use"); /** * Store if the field is in use in datastore */ static final Property DATE_FORMAT = new BeanProperty<>("dateFormat", "dateFormat"); /** * If field is analyzed */ static final Property ANALYZED = new BeanProperty<>("analyzed", "analyzed"); /** * If field is stored */ static final Property STORED = new BeanProperty<>("stored", "stored"); /** * Order of the field */ static final Property ORDER = new BeanProperty<>("order", "order"); /** * Custom name of the field */ static final Property CUSTOM_NAME = new BeanProperty<>("customName", "customName"); /** * Build attribute provider * * @param attributes list to use as source for provider */ public ElasticAttributeProvider(List attributes) { this.attributes = attributes; } @Override protected List> getProperties() { return Arrays.asList(USE, NAME, TYPE, ORDER, CUSTOM_NAME, 
DEFAULT_GEOMETRY, STORED, ANALYZED, SRID, DATE_FORMAT); } @Override protected List getItems() { return attributes; } } ================================================ FILE: gs-web-elasticsearch/src/main/java/mil/nga/giat/elasticsearch/ElasticConfigurationPage.html ================================================ <wicket:message key="title">Elasticsearch fields configuration</wicket:message>
Use All Short Names
[Feedback Panel]
================================================ FILE: gs-web-elasticsearch/src/main/java/mil/nga/giat/elasticsearch/ElasticConfigurationPage.java ================================================ /* (c) 2014 Open Source Geospatial Foundation - all rights reserved * This code is licensed under the GPL 2.0 license, available at the root * application directory. */ package mil.nga.giat.elasticsearch; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import mil.nga.giat.data.elasticsearch.ElasticAttribute; import mil.nga.giat.data.elasticsearch.ElasticDataStore; import mil.nga.giat.data.elasticsearch.ElasticLayerConfiguration; import org.apache.wicket.AttributeModifier; import org.apache.wicket.Component; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.form.AjaxButton; import org.apache.wicket.ajax.markup.html.form.AjaxCheckBox; import org.apache.wicket.behavior.AttributeAppender; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.CheckBox; import org.apache.wicket.markup.html.form.DropDownChoice; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.form.IChoiceRenderer; import org.apache.wicket.markup.html.form.TextField; import org.apache.wicket.markup.html.list.ListItem; import org.apache.wicket.markup.html.panel.FeedbackPanel; import org.apache.wicket.markup.html.panel.Fragment; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.CompoundPropertyModel; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.PropertyModel; import org.geoserver.catalog.Catalog; import org.geoserver.catalog.CatalogBuilder; import org.geoserver.catalog.DataStoreInfo; import org.geoserver.catalog.FeatureTypeInfo; import 
org.geoserver.catalog.LayerInfo; import org.geoserver.catalog.ResourceInfo; import org.geoserver.web.GeoServerApplication; import org.geoserver.web.wicket.GeoServerDataProvider.Property; import org.geoserver.web.wicket.GeoServerTablePanel; import org.geoserver.web.wicket.ParamResourceModel; import org.geotools.data.util.NullProgressListener; import org.geotools.feature.NameImpl; import org.geotools.util.logging.Logging; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryCollection; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.MultiLineString; import org.locationtech.jts.geom.MultiPoint; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import org.opengis.feature.type.Name; /** * Class to render and manage the Elasticsearch modal dialog This dialog allow * the user to choice which Elasticsearch attributes include in layers, selects * attribute to use as GEOMETRY. */ abstract class ElasticConfigurationPage extends Panel { private static final long serialVersionUID = 5615867383881988931L; private static final Logger LOGGER = Logging.getLogger(ElasticConfigurationPage.class); private final String useAllMarkupId; private static final List> GEOMETRY_TYPES = Arrays.asList(Geometry.class, GeometryCollection.class, Point.class, MultiPoint.class, LineString.class, MultiLineString.class, Polygon.class, MultiPolygon.class); /** * Constructs the dialog to set Elasticsearch attributes and configuration * options. 
* * @see ElasticAttributeProvider * @see ElasticAttribute * */ public ElasticConfigurationPage(String panelId, final IModel model) { super(panelId, model); ResourceInfo ri = (ResourceInfo) model.getObject(); @SuppressWarnings("unchecked") final Form elastic_form = new Form("es_form", new CompoundPropertyModel(this)); add(elastic_form); List attributes; attributes = fillElasticAttributes(ri).getAttributes(); final ElasticAttributeProvider attProvider = new ElasticAttributeProvider(attributes); final GeoServerTablePanel elasticAttributePanel; elasticAttributePanel = getElasticAttributePanel(attProvider); elastic_form.add(elasticAttributePanel); // select all check box boolean selectAll = true; for (final ElasticAttribute attribute : attributes) { if (attribute.isUse() == null || !attribute.isUse()) { selectAll = false; } } AjaxCheckBox useAllCheckBox = new AjaxCheckBox("useAll", Model.of(selectAll)) { @Override protected void onUpdate(AjaxRequestTarget target) { final boolean use = (Boolean) this.getDefaultModelObject(); for (final ElasticAttribute attribute : attProvider.getItems()) { attribute.setUse(use); } target.add(elasticAttributePanel); } }; useAllCheckBox.setOutputMarkupId(true); elastic_form.add(useAllCheckBox); useAllMarkupId = useAllCheckBox.getMarkupId(); // use short name check box final Boolean useShortName; if (!attributes.isEmpty() && attributes.get(0).getUseShortName() != null) { useShortName = attributes.get(0).getUseShortName(); } else { useShortName = false; } AjaxCheckBox checkBox = new AjaxCheckBox("useShortName", Model.of(useShortName)) { @Override protected void onUpdate(AjaxRequestTarget target) { final boolean useShortName = (Boolean) this.getDefaultModelObject(); for (final ElasticAttribute attribute : attProvider.getItems()) { attribute.setUseShortName(useShortName); } target.add(elasticAttributePanel); } }; checkBox.setOutputMarkupId(true); elastic_form.add(checkBox); elastic_form.add(new AjaxButton("es_save") { protected void 
onSubmit(AjaxRequestTarget target, Form form) { onSave(target); } }); FeedbackPanel feedbackPanel = new FeedbackPanel("es_feedback"); feedbackPanel.setOutputMarkupId(true); elastic_form.add(feedbackPanel); } /** * Do nothing */ @SuppressWarnings("unused") protected void onCancel(AjaxRequestTarget target) { done(target, null, null); } /** * Validates Elasticsearch attributes configuration and stores the * Elasticsearch layer configuration into feature type metadata as * {@link ElasticLayerConfiguration#KEY}
* Validation include the follow rules
  • One attribute must be a GEOMETRY. * * @see ElasticLayerConfiguration * @see FeatureTypeInfo#getMetadata */ private void onSave(AjaxRequestTarget target) { try { ResourceInfo ri = (ResourceInfo) getDefaultModel().getObject(); ElasticLayerConfiguration layerConfig = fillElasticAttributes(ri); boolean geomSet = false; // Validate configuration for (ElasticAttribute att : layerConfig.getAttributes()) { if (Geometry.class.isAssignableFrom(att.getType()) && att.isUse()) { geomSet = true; } } if (!geomSet) { error(new ParamResourceModel("geomEmptyFailure", ElasticConfigurationPage.this) .getString()); } Catalog catalog = ((GeoServerApplication) this.getPage().getApplication()).getCatalog(); FeatureTypeInfo typeInfo; DataStoreInfo dsInfo = catalog.getStore(ri.getStore().getId(), DataStoreInfo.class); ElasticDataStore ds = (ElasticDataStore) dsInfo.getDataStore(null); CatalogBuilder builder = new CatalogBuilder(catalog); builder.setStore(dsInfo); typeInfo = builder.buildFeatureType(ds.getFeatureSource(ri.getQualifiedName())); typeInfo.setName(ri.getName()); typeInfo.getMetadata().put(ElasticLayerConfiguration.KEY, layerConfig); LayerInfo layerInfo = builder.buildLayer(typeInfo); layerInfo.setName(ri.getName()); done(target, layerInfo, layerConfig); } catch (Exception e) { LOGGER.log(Level.SEVERE, e.getMessage(), e); error(new ParamResourceModel("creationFailure", this, e).getString()); } } /* * Load ElasticLayerConfiguration configuration before shows on table Reloads * Elasticsearch attributes from datastore and merge it with user attributes * configurations */ private ElasticLayerConfiguration fillElasticAttributes(ResourceInfo ri) { ElasticLayerConfiguration layerConfig = (ElasticLayerConfiguration) ri.getMetadata() .get(ElasticLayerConfiguration.KEY); if (layerConfig == null) { layerConfig = new ElasticLayerConfiguration(ri.getName()); ri.getMetadata().put(ElasticLayerConfiguration.KEY, layerConfig); } try { ElasticDataStore dataStore = (ElasticDataStore) 
((DataStoreInfo) ri.getStore()) .getDataStore(new NullProgressListener()); ArrayList result = new ArrayList<>(); Map tempMap = new HashMap<>(); final List attributes = layerConfig.getAttributes(); for (ElasticAttribute att : attributes) { tempMap.put(att.getName(), att); } final String docType = layerConfig.getDocType(); final Name layerName = new NameImpl(layerConfig.getLayerName()); dataStore.getDocTypes().put(layerName, docType); for (ElasticAttribute at : dataStore.getElasticAttributes(layerName)) { if (tempMap.containsKey(at.getName())) { at = tempMap.get(at.getName()); } result.add(at); } layerConfig.getAttributes().clear(); layerConfig.getAttributes().addAll(result); } catch (Exception e) { LOGGER.log(Level.SEVERE, e.getMessage(), e); } Collections.sort(layerConfig.getAttributes()); return layerConfig; } /* * Builds attribute table */ private GeoServerTablePanel getElasticAttributePanel( ElasticAttributeProvider attProvider) { GeoServerTablePanel atts = new GeoServerTablePanel( "esAttributes", attProvider) { @Override protected Component getComponentForProperty(String id, IModel itemModel, Property property) { ElasticAttribute att = itemModel.getObject(); boolean isGeometry = att.getType() != null && Geometry.class.isAssignableFrom(att.getType()); if (property == ElasticAttributeProvider.NAME && isGeometry) { Fragment f = new Fragment(id, "label", ElasticConfigurationPage.this); f.add(new Label("label", att.getDisplayName() + "*")); return f; } else if (property == ElasticAttributeProvider.TYPE && isGeometry) { Fragment f = new Fragment(id, "geometry", ElasticConfigurationPage.this); //noinspection unchecked f.add(new DropDownChoice("geometry", new PropertyModel(itemModel, "type"), GEOMETRY_TYPES, new GeometryTypeRenderer())); return f; } else if (property == ElasticAttributeProvider.USE) { CheckBox checkBox = new CheckBox("use", new PropertyModel<>(itemModel, "use")); final String onclick = "document.getElementById(\"" + useAllMarkupId + "\").checked = 
false;"; checkBox.add(new AttributeAppender("onclick", new Model<>(onclick), ";")); Fragment f = new Fragment(id, "checkboxUse", ElasticConfigurationPage.this); f.add(checkBox); return f; } else if (property == ElasticAttributeProvider.DEFAULT_GEOMETRY) { if (isGeometry) { Fragment f = new Fragment(id, "checkboxDefaultGeometry", ElasticConfigurationPage.this); f.add(new CheckBox("defaultGeometry", new PropertyModel<>(itemModel, "defaultGeometry"))); return f; } else { return new Fragment(id, "empty", ElasticConfigurationPage.this); } } else if (property == ElasticAttributeProvider.SRID) { if (isGeometry) { Fragment f = new Fragment(id, "label", ElasticConfigurationPage.this); f.add(new Label("label", String.valueOf(att.getSrid()))); return f; } else { return new Fragment(id, "empty", ElasticConfigurationPage.this); } } else if (property == ElasticAttributeProvider.DATE_FORMAT) { if (att.getDateFormat() != null) { Fragment f = new Fragment(id, "label", ElasticConfigurationPage.this); f.add(new Label("label", String.valueOf(att.getDateFormat()))); return f; } else { return new Fragment(id, "empty", ElasticConfigurationPage.this); } } else if (property == ElasticAttributeProvider.ANALYZED) { if (att.getAnalyzed() != null && att.getAnalyzed()) { Fragment f = new Fragment(id, "label", ElasticConfigurationPage.this); f.add(new Label("label", "x")); return f; } else { return new Fragment(id, "empty", ElasticConfigurationPage.this); } } else if (property == ElasticAttributeProvider.STORED) { if (att.isStored()) { Fragment f = new Fragment(id, "label", ElasticConfigurationPage.this); f.add(new Label("label", "x")); return f; } else { return new Fragment(id, "empty", ElasticConfigurationPage.this); } } else if (property == ElasticAttributeProvider.ORDER) { TextField order = new TextField<>("order", new PropertyModel<>(itemModel, "order")); Fragment f = new Fragment(id, "textOrderValue", ElasticConfigurationPage.this); f.add(order); return f; } else if (property == 
ElasticAttributeProvider.CUSTOM_NAME) { TextField customName = new TextField<>("customName", new PropertyModel<>(itemModel, "customName")); Fragment f = new Fragment(id, "textCustomNameValue", ElasticConfigurationPage.this); f.add(customName); return f; } return null; } @Override protected void onPopulateItem(Property property, ListItem> item) { if (property == ElasticAttributeProvider.STORED) { item.add(new AttributeModifier("style",Model.of("text-align:center"))); } else if (property == ElasticAttributeProvider.ANALYZED) { item.add(new AttributeModifier("style",Model.of("text-align:center"))); } } }; atts.setOutputMarkupId(true); atts.setFilterVisible(false); atts.setSortable(false); atts.setPageable(false); atts.setOutputMarkupId(true); return atts; } /* * Render geometry type select */ private static class GeometryTypeRenderer implements IChoiceRenderer { public Object getDisplayValue(Object object) { return ((Class) object).getSimpleName(); } public String getIdValue(Object object, int index) { return (String) getDisplayValue(object); } @Override public Object getObject(String id, IModel> choices) { for (Class c : GEOMETRY_TYPES) { if (id.equals(getDisplayValue(c))) { return c; } } return null; } } /** * Abstract method to implements in panel that opens the dialog to close the dialog itself
    * This method is called after modal executes its operation * * @param target ajax response target * @param layerInfo GeoServer layer configuration * @param layerConfig Elasticsearch layer configuration * * @see #onSave * @see #onCancel * */ abstract void done(AjaxRequestTarget target, LayerInfo layerInfo, ElasticLayerConfiguration layerConfig); } ================================================ FILE: gs-web-elasticsearch/src/main/java/mil/nga/giat/elasticsearch/ElasticConfigurationPanel.html ================================================
    ================================================ FILE: gs-web-elasticsearch/src/main/java/mil/nga/giat/elasticsearch/ElasticConfigurationPanel.java ================================================ /* (c) 2014 Open Source Geospatial Foundation - all rights reserved * This code is licensed under the GPL 2.0 license, available at the root * application directory. */ package mil.nga.giat.elasticsearch; import java.io.IOException; import java.util.List; import java.util.logging.Level; import mil.nga.giat.data.elasticsearch.ElasticAttribute; import mil.nga.giat.data.elasticsearch.ElasticDataStore; import mil.nga.giat.data.elasticsearch.ElasticLayerConfiguration; import org.apache.wicket.Component; import org.apache.wicket.MarkupContainer; import org.apache.wicket.ajax.AbstractDefaultAjaxBehavior; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.AjaxLink; import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.OnLoadHeaderItem; import org.apache.wicket.markup.html.panel.Fragment; import org.apache.wicket.model.IModel; import org.geoserver.catalog.Catalog; import org.geoserver.catalog.CatalogBuilder; import org.geoserver.catalog.DataStoreInfo; import org.geoserver.catalog.FeatureTypeInfo; import org.geoserver.catalog.LayerInfo; import org.geoserver.catalog.ResourceInfo; import org.geoserver.web.GeoServerApplication; import org.geoserver.web.data.resource.ResourceConfigurationPage; import org.geoserver.web.data.resource.ResourceConfigurationPanel; import org.geoserver.web.wicket.ParamResourceModel; import org.geotools.feature.NameImpl; import org.opengis.feature.type.Name; /** * Resource configuration panel to show a link to open Elasticsearch attribute * modal dialog
    If the Elasticsearch attribute are not configured for * current layer, the modal dialog will be open at first resource configuration * window opening
    After modal dialog is closed the resource page is * reloaded and feature configuration table updated * */ @SuppressWarnings("WeakerAccess") public class ElasticConfigurationPanel extends ResourceConfigurationPanel { private static final long serialVersionUID = 3382530429105288433L; private LayerInfo _layerInfo; private ElasticLayerConfiguration _layerConfig; /** * Adds Elasticsearch configuration panel link, configure modal dialog and * implements modal callback. * * @see ElasticConfigurationPage#done */ public ElasticConfigurationPanel(final String panelId, final IModel model) { super(panelId, model); final FeatureTypeInfo fti = (FeatureTypeInfo) model.getObject(); final ModalWindow modal = new ModalWindow("modal"); modal.setInitialWidth(800); modal.setTitle(new ParamResourceModel("modalTitle", ElasticConfigurationPanel.this)); if (fti.getMetadata().get(ElasticLayerConfiguration.KEY) == null) { modal.add(new OpenWindowOnLoadBehavior()); } modal.setContent(new ElasticConfigurationPage(panelId, model) { @Override void done(AjaxRequestTarget target, LayerInfo layerInfo, ElasticLayerConfiguration layerConfig) { _layerInfo = layerInfo; _layerConfig = layerConfig; try { saveLayer((FeatureTypeInfo) getResourceInfo()); } catch (IOException e) { LOGGER.log(Level.SEVERE, e.getMessage(), e); error(new ParamResourceModel("creationFailure", this, e).getString()); } MarkupContainer parent = ElasticConfigurationPanel.this.getParent(); while (!(parent == null || parent instanceof ResourceConfigurationPage)) { parent = parent.getParent(); } if (parent != null) { ResourceInfo ri = ElasticConfigurationPanel.this.getResourceInfo(); ((ResourceConfigurationPage) parent).updateResource(ri, target); } modal.close(target); } }); add(modal); AjaxLink findLink = new AjaxLink("edit") { @Override public void onClick(AjaxRequestTarget target) { modal.show(target); } }; final Fragment attributePanel = new Fragment("esPanel", "esPanelFragment", this); attributePanel.setOutputMarkupId(true); 
add(attributePanel); attributePanel.add(findLink); } /* * Open modal dialog on window load */ private class OpenWindowOnLoadBehavior extends AbstractDefaultAjaxBehavior { @Override protected void respond(AjaxRequestTarget target) { ModalWindow window = (ModalWindow) getComponent(); window.show(target); } @Override public void renderHead(Component component, IHeaderResponse response) { response.render(OnLoadHeaderItem.forScript(getCallbackScript().toString())); } } private void saveLayer(FeatureTypeInfo ft) throws IOException { GeoServerApplication app = (GeoServerApplication) getApplication(); Catalog catalog = app.getCatalog(); String namespace = ft.getNamespace().getURI(); Name qualifiedName = new NameImpl(namespace, _layerInfo.getName()); LayerInfo layerInfo = catalog.getLayerByName(qualifiedName); boolean isNew = ft.getId() == null || app.getCatalog().getResource(ft.getId(),ResourceInfo.class) == null; FeatureTypeInfo typeInfo; if (layerInfo == null || isNew) { // New DataStoreInfo dsInfo; dsInfo = catalog.getStore(ft.getStore().getId(), DataStoreInfo.class); ElasticDataStore ds = (ElasticDataStore) dsInfo.getDataStore(null); CatalogBuilder builder = new CatalogBuilder(catalog); builder.setStore(dsInfo); ElasticLayerConfiguration layerConfig; layerConfig = new ElasticLayerConfiguration(_layerConfig); layerConfig.setLayerName(_layerInfo.getName()); layerConfig.getAttributes().clear(); List attributes = _layerConfig.getAttributes(); layerConfig.getAttributes().addAll(attributes); ds.setLayerConfiguration(layerConfig); FeatureTypeInfo _typeInfo = (FeatureTypeInfo) _layerInfo.getResource(); typeInfo = builder.buildFeatureType(ds.getFeatureSource(qualifiedName)); typeInfo.setName(_layerInfo.getName()); typeInfo.getMetadata().put(ElasticLayerConfiguration.KEY, layerConfig); typeInfo.setEnabled(_typeInfo.isEnabled()); typeInfo.setAdvertised(_typeInfo.isAdvertised()); typeInfo.setTitle(_typeInfo.getTitle()); typeInfo.setDescription(_typeInfo.getDescription()); 
typeInfo.setAbstract(_typeInfo.getAbstract()); typeInfo.getKeywords().addAll(_typeInfo.getKeywords()); typeInfo.getMetadataLinks().addAll(_typeInfo.getMetadataLinks()); typeInfo.getDataLinks().addAll(_typeInfo.getDataLinks()); typeInfo.setSRS(_typeInfo.getSRS()); typeInfo.setProjectionPolicy(_typeInfo.getProjectionPolicy()); typeInfo.setNativeBoundingBox(_typeInfo.getNativeBoundingBox()); typeInfo.setLatLonBoundingBox(_typeInfo.getLatLonBoundingBox()); typeInfo.setCircularArcPresent(_typeInfo.isCircularArcPresent()); typeInfo.setLinearizationTolerance(_typeInfo.getLinearizationTolerance()); layerInfo = builder.buildLayer(typeInfo); builder.updateLayer(layerInfo, _layerInfo); layerInfo.setName(_layerInfo.getName()); layerInfo.setResource(typeInfo); } else { // Update typeInfo = (FeatureTypeInfo) layerInfo.getResource(); typeInfo.getMetadata().put(ElasticLayerConfiguration.KEY, _layerConfig); } } } ================================================ FILE: gs-web-elasticsearch/src/main/java/mil/nga/giat/elasticsearch/ElasticConfigurationPanelInfo.java ================================================ /* (c) 2014 Open Source Geospatial Foundation - all rights reserved * This code is licensed under the GPL 2.0 license, available at the root * application directory. */ package mil.nga.giat.elasticsearch; import org.geoserver.catalog.FeatureTypeInfo; import org.geoserver.platform.ExtensionPriority; import org.geoserver.web.data.resource.ResourceConfigurationPanelInfo; /** * * Implements ResourceConfigurationPanelInfo extension point to add Elasticsearch * attribute configuration link on resource page.
    * Priority is reduced under standard {@link ExtensionPriority#LOWEST} to shows * the Elasticsearch link after other panels. * */ class ElasticConfigurationPanelInfo extends ResourceConfigurationPanelInfo implements ExtensionPriority { private static final long serialVersionUID = 1485404586629946126L; @Override public boolean canHandle(Object obj) { boolean canHandle = false; if (obj instanceof FeatureTypeInfo) { FeatureTypeInfo fti = (FeatureTypeInfo) obj; for (String st : getSupportedTypes()) { if (fti.getStore().getType().equals(st)) { canHandle = true; break; } } } return canHandle; } @Override public int getPriority() { return ExtensionPriority.LOWEST + 1; } } ================================================ FILE: gs-web-elasticsearch/src/main/java/mil/nga/giat/elasticsearch/ElasticFeatureTypeCallback.java ================================================ /* (c) 2014 Open Source Geospatial Foundation - all rights reserved * This code is licensed under the GPL 2.0 license, available at the root * application directory. */ package mil.nga.giat.elasticsearch; import mil.nga.giat.data.elasticsearch.ElasticDataStore; import mil.nga.giat.data.elasticsearch.ElasticLayerConfiguration; import static mil.nga.giat.data.elasticsearch.ElasticLayerConfiguration.KEY; import org.geoserver.catalog.FeatureTypeInfo; import org.geoserver.catalog.FeatureTypeCallback; import org.geotools.data.DataAccess; import org.opengis.feature.Feature; import org.opengis.feature.type.FeatureType; import org.opengis.feature.type.Name; /** * * Implementation of FeatureTypeInitializer extension point to initialize * Elasticsearch datastore. 
* * @see FeatureTypeCallback * */ class ElasticFeatureTypeCallback implements FeatureTypeCallback { @Override public boolean canHandle(FeatureTypeInfo info, DataAccess dataAccess) { return dataAccess instanceof ElasticDataStore; } @Override public boolean initialize(FeatureTypeInfo info, DataAccess dataAccess, Name temporaryName) { ElasticLayerConfiguration layerConfig; layerConfig = (ElasticLayerConfiguration) info.getMetadata().get(KEY); if (layerConfig == null) { layerConfig = new ElasticLayerConfiguration(info.getName()); } ((ElasticDataStore) dataAccess).setLayerConfiguration(layerConfig); return false; } @Override public void dispose(FeatureTypeInfo info, DataAccess dataAccess, Name temporaryName) { final ElasticLayerConfiguration layerConfig = (ElasticLayerConfiguration) info.getMetadata().get(KEY); if (layerConfig != null) { layerConfig.getAttributes().stream() .filter(attr -> attr.getName().equals(info.getName())) .findFirst() .ifPresent(attribute -> layerConfig.getAttributes().remove(attribute)); ((ElasticDataStore) dataAccess).getDocTypes().remove(info.getQualifiedName()); } } @Override public void flush(FeatureTypeInfo info, DataAccess dataAccess) { // nothing to do } } ================================================ FILE: gs-web-elasticsearch/src/main/java/mil/nga/giat/elasticsearch/ElasticXStreamInitializer.java ================================================ /* (c) 2014 Open Source Geospatial Foundation - all rights reserved * (c) 2014 OpenPlans * This code is licensed under the GPL 2.0 license, available at the root * application directory. 
*/ package mil.nga.giat.elasticsearch; import mil.nga.giat.data.elasticsearch.ElasticAttribute; import mil.nga.giat.data.elasticsearch.ElasticLayerConfiguration; import org.geoserver.config.util.XStreamPersister; import org.geoserver.config.util.XStreamPersisterInitializer; import com.thoughtworks.xstream.XStream; /** * * Implementation of XStreamPersisterInitializer extension point to serialize ElasticLayerConfiguration * */ class ElasticXStreamInitializer implements XStreamPersisterInitializer { @Override public void init(XStreamPersister persister) { persister.registerBreifMapComplexType("elasticLayerConfiguration",ElasticLayerConfiguration.class); XStream xs = persister.getXStream(); xs.alias("esAttribute", ElasticAttribute.class); } } ================================================ FILE: gs-web-elasticsearch/src/main/java/mil/nga/giat/elasticsearch/ElasticXStreamPersisterInitializer.java ================================================ package mil.nga.giat.elasticsearch; import org.geoserver.config.util.XStreamPersister; import org.geoserver.config.util.XStreamPersisterInitializer; class ElasticXStreamPersisterInitializer implements XStreamPersisterInitializer { @Override public void init(XStreamPersister persister) { persister.getXStream().allowTypes(new String[] { "mil.nga.giat.data.elasticsearch.ElasticAttribute" }); } } ================================================ FILE: gs-web-elasticsearch/src/main/resources/GeoServerApplication.properties ================================================ # suppress inspection "UnusedProperty" for whole file data.resource.config.elasticsearch = Elasticsearch ElasticConfigurationPanel.modalTitle = Elasticsearch fields configuration ElasticConfigurationPanel.edit = Configure Elasticsearch fields ElasticConfigurationPage.attributes = Attributes ElasticConfigurationPage.th.name = Name ElasticConfigurationPage.th.type = Type ElasticConfigurationPage.th.use = Use ElasticConfigurationPage.th.geometry = Geometry 
ElasticConfigurationPage.th.srid = SRID ElasticConfigurationPage.th.defaultGeometry = Default Geometry ElasticConfigurationPage.th.dateFormat = Date Format ElasticConfigurationPage.th.analyzed = Analyzed ElasticConfigurationPage.th.stored = Stored ElasticConfigurationPage.th.order = Order ElasticConfigurationPage.th.customName = Custom Name ElasticConfigurationPage.useAll = Use all ElasticConfigurationPage.useShortName = Short names ElasticConfigurationPage.es_save = Apply ElasticConfigurationPage.es_cancel = Cancel ElasticConfigurationPage.creationFailure = Creation failure ElasticConfigurationPage.geomEmptyFailure = Select field for geometry ================================================ FILE: gt-elasticsearch/LGPL ================================================ GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. 
When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. 
Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. 
For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. 
For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. 
d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. 
However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. 
If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. 
For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. 
You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. 
It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. 
If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Libraries If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. 
You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. , 1 April 1990 Ty Coon, President of Vice That's all there is to it! ================================================ FILE: gt-elasticsearch/LICENSE ================================================ This module is licensed under the terms of the GNU Lesser General Public License (LGPL), version 2 or later. The directory containing this file should also contain a copy of the LGPL, as a file named LGPL. 
This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. ================================================ FILE: gt-elasticsearch/pom.xml ================================================ 4.0.0 elasticgeo mil.nga.giat 2.16-SNAPSHOT gt-elasticsearch 2.16-SNAPSHOT jar GeoTools Elasticsearch DataStore docker.elastic.co/elasticsearch/elasticsearch org.springframework.security spring-security-core 5.1.5.RELEASE aopalliance aopalliance provided mil.nga.giat joda-shaded ${project.version} joda-time joda-time org.elasticsearch.client elasticsearch-rest-client ${es.version} org.locationtech.spatial4j spatial4j 0.6 com.fasterxml.jackson.core jackson-core ${jackson.version} com.fasterxml.jackson.core jackson-databind ${jackson.version} com.github.davidmoten geo 0.7.4 com.google.guava guava ${guava.version} log4j log4j ${log4j.version} org.geotools gt-main ${geotools.version} provided org.geotools gt-epsg-hsql ${geotools.version} provided org.geotools gt-geojson ${geotools.version} provided org.geotools gt-cql ${geotools.version} provided org.mockito mockito-core 2.7.5 test junit junit 4.11 test org.hamcrest hamcrest-all 1.3 test true src/main/resources maven-failsafe-plugin 2.19.1 ${failsafeArgLine} -Djava.util.logging.config.file=${project.build.directory}/test-classes/test-classes/logging.properties ${skip.integration.tests} integration-test verify docker !skip.integration.tests true io.fabric8 docker-maven-plugin 0.18.1 elasticsearch ${docker.image}:${es.test.version} -Xmx512m -Xms512m single-node false 9200:9200 http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=60s GET 200 docker-start pre-integration-test start docker-stop post-integration-test stop ================================================ FILE: 
gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticAggregation.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import java.util.List; import java.util.Map; @JsonIgnoreProperties(ignoreUnknown = true) class ElasticAggregation { private List> buckets; public List> getBuckets() { return buckets; } public void setBuckets(List> buckets) { this.buckets = buckets; } @Override public String toString() { return "ElasticAggregation[numBuckets=" + (buckets != null ? buckets.size() : 0) + "]"; } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticAttribute.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; import java.io.Serializable; import java.util.Objects; import java.util.regex.Pattern; /** * Class describing and Elasticsearch attribute including name, type and * optional information on geometry and date types. Also includes an alternative * short name, if applicable, that can be used instead of the full path both in * the feature type and backend Elasticsearch queries. 
* */ public class ElasticAttribute implements Serializable, Comparable { public enum ElasticGeometryType { GEO_POINT, GEO_SHAPE } private static final Pattern beginLetters = Pattern.compile("^[A-Za-z_].*"); private static final long serialVersionUID = 8839579461838862328L; private final String name; private String shortName; private Boolean useShortName; private Class type; private ElasticGeometryType geometryType; private Boolean use; private Boolean defaultGeometry; private Integer srid; private String dateFormat; private Boolean analyzed; private boolean stored; private boolean nested; private Integer order; private String customName; public ElasticAttribute(String name) { super(); this.name = name; this.use = true; this.defaultGeometry = false; this.useShortName = false; this.stored = false; this.nested = false; } public ElasticAttribute(ElasticAttribute other) { this.name = other.name; this.shortName = other.shortName; this.type = other.type; this.use = other.use; this.defaultGeometry = other.defaultGeometry; this.srid = other.srid; this.dateFormat = other.dateFormat; this.useShortName = other.useShortName; this.geometryType = other.geometryType; this.analyzed = other.analyzed; this.stored = other.stored; this.nested = other.nested; this.order = other.order; this.customName = other.customName; } public String getName() { return name; } public String getShortName() { return shortName; } public void setShortName(String shortName) { this.shortName = shortName; } public Boolean getUseShortName() { return useShortName; } public void setUseShortName(Boolean useShortName) { this.useShortName = useShortName; } public Class getType() { return type; } public void setType(Class type) { this.type = type; } public ElasticGeometryType getGeometryType() { return geometryType; } public void setGeometryType(ElasticGeometryType geometryType) { this.geometryType = geometryType; } public Boolean isUse() { return use; } public void setUse(Boolean use) { this.use = use; } public 
Boolean isDefaultGeometry() { return defaultGeometry; } public void setDefaultGeometry(Boolean defaultGeometry) { this.defaultGeometry = defaultGeometry; } public Integer getSrid() { return srid; } public void setSrid(Integer srid) { this.srid = srid; } public String getDateFormat() { return dateFormat; } public void setDateFormat(String dateFormat) { this.dateFormat = dateFormat; } public Boolean getAnalyzed() { return analyzed; } public void setAnalyzed(Boolean analyzed) { this.analyzed = analyzed; } public boolean isStored() { return stored; } public void setStored(boolean stored) { this.stored = stored; } public boolean isNested() { return nested; } public void setNested(boolean nested) { this.nested = nested; } public void setOrder(Integer order) { this.order = order; } public Integer getOrder() { return this.order; } public void setCustomName(String name) { this.customName = normalizeName(name); } public String getCustomName() { return this.customName; } public String getDisplayName() { final String displayName; if (useShortName) { displayName = shortName; } else { displayName = name; } return displayName; } @Override public int hashCode() { return Objects.hash(name, type, use, defaultGeometry, srid, dateFormat, useShortName, geometryType, analyzed, stored, nested, order, customName); } @Override public boolean equals(Object obj) { boolean equal; if (obj == null || getClass() != obj.getClass()) { equal = false; } else { ElasticAttribute other = (ElasticAttribute) obj; equal = Objects.equals(name, other.name); equal &= Objects.equals(type, other.type); equal &= Objects.equals(use, other.use); equal &= Objects.equals(defaultGeometry, other.defaultGeometry); equal &= Objects.equals(srid, other.srid); equal &= Objects.equals(dateFormat, other.dateFormat); equal &= Objects.equals(useShortName, other.useShortName); equal &= Objects.equals(geometryType, other.geometryType); equal &= Objects.equals(analyzed, other.analyzed); equal &= Objects.equals(stored, 
other.stored); equal &= Objects.equals(nested, other.nested); equal &= Objects.equals(order, other.order); equal &= Objects.equals(customName, other.customName); } return equal; } /** * Implement comparison logic * @param o is a non-null ElasticAttribute * @return negative for before, zero for same, positive after */ @Override public int compareTo(@SuppressWarnings("NullableProblems") ElasticAttribute o) { if (this.order == null) { return o.order == null ? this.name.compareTo(o.name) : 1; } if (o.order == null) { return -1; } int i = this.order.compareTo(o.order); return i == 0 ? this.name.compareTo(o.name) : i; } /** * Perform basic update to the given name to make it XML namespace * compliant. * * @param name Raw name * @return Name that is XML safe */ private static String normalizeName (String name) { String normalName = name; /* XML element naming rules: * 1. Element names must start with a letter or underscore * 2. Element names cannot start with the letters xml * 3. Element names cannot contain spaces */ if (normalName.toLowerCase().startsWith("xml")) { normalName = "_".concat(normalName); } else if (! beginLetters.matcher(normalName).matches()) { normalName = "_".concat(normalName); } /* Simply replace all spaces in the name with "_". */ return normalName.replaceAll(" ", "_"); } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticCapabilities.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
 */

package mil.nga.giat.data.elasticsearch;

import java.util.HashMap;
import java.util.Map;

import org.geotools.filter.Capabilities;
import org.geotools.filter.capability.FilterCapabilitiesImpl;
import org.geotools.filter.capability.TemporalCapabilitiesImpl;
import org.geotools.filter.capability.TemporalOperatorImpl;
import org.geotools.filter.visitor.IsFullySupportedFilterVisitor;
import org.opengis.filter.ExcludeFilter;
import org.opengis.filter.Filter;
import org.opengis.filter.Id;
import org.opengis.filter.IncludeFilter;
import org.opengis.filter.PropertyIsBetween;
import org.opengis.filter.PropertyIsLike;
import org.opengis.filter.PropertyIsNull;
import org.opengis.filter.capability.TemporalCapabilities;
import org.opengis.filter.capability.TemporalOperators;
import org.opengis.filter.spatial.BBOX;
import org.opengis.filter.spatial.Beyond;
import org.opengis.filter.spatial.Contains;
import org.opengis.filter.spatial.DWithin;
import org.opengis.filter.spatial.Disjoint;
import org.opengis.filter.spatial.Intersects;
import org.opengis.filter.spatial.Within;
import org.opengis.filter.temporal.After;
import org.opengis.filter.temporal.AnyInteracts;
import org.opengis.filter.temporal.Before;
import org.opengis.filter.temporal.Begins;
import org.opengis.filter.temporal.BegunBy;
import org.opengis.filter.temporal.BinaryTemporalOperator;
import org.opengis.filter.temporal.During;
import org.opengis.filter.temporal.EndedBy;
import org.opengis.filter.temporal.Ends;
import org.opengis.filter.temporal.Meets;
import org.opengis.filter.temporal.MetBy;
import org.opengis.filter.temporal.OverlappedBy;
import org.opengis.filter.temporal.TContains;
import org.opengis.filter.temporal.TEquals;
import org.opengis.filter.temporal.TOverlaps;

/**
 * Custom {@link Capabilities} supporting temporal capabilities and operators. Uses a custom {@link IsFullySupportedFilterVisitor}
 * to enable support for {@link IncludeFilter}, {@link ExcludeFilter} and {@link BegunBy}.
 *
 */
class ElasticCapabilities extends Capabilities {

    // Lookup from temporal filter class to its operator name.
    // NOTE(review): the generic parameters look garbled by extraction here
    // (likely Map<Class<?>, String> upstream) -- confirm before compiling.
    private static final Map,String> temporalNames;
    static {
        temporalNames = new HashMap<>();
        temporalNames.put(After.class, After.NAME );
        temporalNames.put(AnyInteracts.class, AnyInteracts.NAME );
        temporalNames.put(Before.class, Before.NAME );
        temporalNames.put(Begins.class, Begins.NAME );
        temporalNames.put(BegunBy.class, BegunBy.NAME );
        temporalNames.put(During.class, During.NAME );
        temporalNames.put(EndedBy.class, EndedBy.NAME );
        temporalNames.put(Ends.class, Ends.NAME );
        temporalNames.put(Meets.class, Meets.NAME );
        temporalNames.put(MetBy.class, MetBy.NAME );
        temporalNames.put(OverlappedBy.class, OverlappedBy.NAME );
        temporalNames.put(TContains.class, TContains.NAME );
        temporalNames.put(TEquals.class, TEquals.NAME );
        temporalNames.put(TOverlaps.class, TOverlaps.NAME );
    }

    // Created lazily in fullySupports() so getContents() is fully populated.
    private IsFullySupportedFilterVisitor fullySupportedVisitor;

    public ElasticCapabilities() {
        super(new ElasticFilterCapabilities());

        addAll(LOGICAL_OPENGIS);
        addAll(SIMPLE_COMPARISONS_OPENGIS);
        addType(PropertyIsNull.class);
        addType(PropertyIsBetween.class);
        addType(Id.class);
        addType(IncludeFilter.class);
        addType(ExcludeFilter.class);
        addType(PropertyIsLike.class);

        // spatial filters
        addType(BBOX.class);
        addType(Contains.class);
        //addType(Crosses.class);
        addType(Disjoint.class);
        //addType(Equals.class);
        addType(Intersects.class);
        //addType(Overlaps.class);
        //addType(Touches.class);
        addType(Within.class);
        addType(DWithin.class);
        addType(Beyond.class);

        //temporal filters
        addType(After.class);
        addType(Before.class);
        addType(Begins.class);
        addType(BegunBy.class);
        addType(During.class);
        addType(Ends.class);
        addType(EndedBy.class);
        addType(TContains.class);
        addType(TEquals.class);
    }

    /**
     * Returns whether the given filter (and its sub filters) is fully
     * supported, using the custom visitor defined below. Null filters
     * report false.
     */
    @Override
    public boolean fullySupports(Filter filter) {
        if( fullySupportedVisitor == null ){
            fullySupportedVisitor = new ElasticIsFullySupportedFilterVisitor();
        }
        return filter != null ?
                (Boolean) filter.accept( fullySupportedVisitor, null ) : false;
    }

    /**
     * Maps a filter class to its operation name, handling the temporal
     * operators that the base class lookup does not know about.
     */
    @Override
    public String toOperationName( @SuppressWarnings("rawtypes") Class filterType ) {
        if (filterType != null && temporalNames.containsKey(filterType)) {
            return temporalNames.get(filterType);
        }
        return super.toOperationName(filterType);
    }

    /**
     * Registers an operation name; temporal operator names are added to the
     * temporal capabilities rather than the scalar/spatial ones.
     */
    @Override
    public void addName( String name ) {
        if (name != null && temporalNames.containsValue(name)) {
            final TemporalOperators operators = getContents().getTemporalCapabilities().getTemporalOperators();
            operators.getOperators().add(new TemporalOperatorImpl(name));
        } else {
            super.addName(name);
        }
    }

    /** Filter capabilities that lazily create a mutable temporal capabilities instance. */
    private static class ElasticFilterCapabilities extends FilterCapabilitiesImpl {

        TemporalCapabilitiesImpl temporal;

        @Override
        public TemporalCapabilities getTemporalCapabilities() {
            if( temporal == null ){
                temporal = new TemporalCapabilitiesImpl();
                super.setTemporal(temporal);
            }
            return temporal;
        }
    }

    /**
     * Visitor extending {@link IsFullySupportedFilterVisitor} to report
     * support for Include, Exclude and BegunBy filters.
     */
    private class ElasticIsFullySupportedFilterVisitor extends IsFullySupportedFilterVisitor {

        ElasticIsFullySupportedFilterVisitor() {
            super(getContents());
        }

        public Object visit( ExcludeFilter filter, Object extraData ) {
            return true;
        }

        public Object visit( IncludeFilter filter, Object extraData ) {
            return true;
        }

        public Object visit(BegunBy begunBy, Object extraData) {
            // Delegate to the generic binary temporal operator check.
            return visit((BinaryTemporalOperator)begunBy, BegunBy.NAME);
        }
    }
}


================================================
FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticClient.java
================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */

package mil.nga.giat.data.elasticsearch;

import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Minimal client abstraction over an Elasticsearch cluster used by the data
 * store: mapping/type discovery, search and scrolling.
 *
 * NOTE(review): generic parameters on List/Map/Set below appear stripped by
 * extraction (likely List&lt;String&gt;, Map&lt;String, Object&gt;,
 * Set&lt;String&gt; upstream) -- confirm against the full source.
 */
interface ElasticClient extends Closeable {

    // Header name used to submit requests on behalf of another user
    // (Elasticsearch run-as support).
    String RUN_AS = "es-security-runas-user";

    // Elasticsearch server version.
    double getVersion();

    // Document type names present in the given index.
    List getTypes(String indexName) throws IOException;

    // Mapping for the given index and type.
    Map getMapping(String indexName, String type) throws IOException;

    // Execute a search request against the given indices and type.
    ElasticResponse search(String searchIndices, String type, ElasticRequest request) throws IOException;

    // Continue a scroll with the given id, keeping it open scrollTime seconds.
    ElasticResponse scroll(String scrollId, Integer scrollTime) throws IOException;

    @Override
    void close() throws IOException;

    // Release server-side resources for the given scroll ids.
    void clearScroll(Set scrollIds) throws IOException;

}


================================================
FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticConstants.java
================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */

package mil.nga.giat.data.elasticsearch;

import java.util.Collections;
import java.util.Map;

import com.google.common.collect.ImmutableMap;

import mil.nga.giat.data.elasticsearch.ElasticAttribute.ElasticGeometryType;

/** Shared constants: the match_all query and feature type user-data keys. */
final class ElasticConstants {

    // Elasticsearch match_all query body.
    // NOTE(review): generic parameters appear stripped by extraction
    // (likely Map<String, Object>) -- confirm against the full source.
    public static final Map MATCH_ALL = ImmutableMap.of("match_all", Collections.EMPTY_MAP);

    /**
     * Key used in the feature type user data to store the format for date
     * fields, if relevant.
     */
    public static final String DATE_FORMAT = "date_format";

    /**
     * Key used in the feature type user data to store the full name for fields.
     */
    public static final String FULL_NAME = "full_name";

    /**
     * Key used in the feature type user data to store the Elasticsearch geometry
     * type ({@link ElasticGeometryType}).
     */
    public static final String GEOMETRY_TYPE = "geometry_type";

    /**
     * Key used in the feature type user data to indicate whether the field is analyzed.
     */
    public static final String ANALYZED = "analyzed";

    /**
     * Key used in the feature type user data to indicate whether the field is nested.
     */
    public static final String NESTED = "nested";

}


================================================
FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticDataStore.java
================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */

package mil.nga.giat.data.elasticsearch;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Logger;
import java.util.stream.Collectors;

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.geotools.data.Query;
import org.geotools.data.Transaction;
import org.geotools.data.store.ContentDataStore;
import org.geotools.data.store.ContentEntry;
import org.geotools.data.store.ContentFeatureSource;
import org.geotools.feature.NameImpl;
import org.geotools.util.logging.Logging;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.Point;
import org.opengis.feature.type.Name;

import mil.nga.giat.data.elasticsearch.ElasticAttribute.ElasticGeometryType;
import mil.nga.giat.shaded.es.common.joda.Joda;

/**
 * A data store for an Elasticsearch index containing geo_point or geo_shape
* types.
 *
 */
public class ElasticDataStore extends ContentDataStore {

    private final static Logger LOGGER = Logging.getLogger(ElasticDataStore.class);

    // NOTE(review): generic parameters on List/Map fields and methods in this
    // class appear stripped by extraction (e.g. List<Name>, Map<Name, String>,
    // Map<String, ElasticLayerConfiguration>) -- confirm against upstream.
    private ElasticClient client;

    private final String indexName;

    // Type names discovered from the index at construction time.
    private final List baseTypeNames;

    // Layer name -> Elasticsearch doc type overrides.
    private final Map docTypes;

    private Map layerConfigurations;

    private boolean sourceFilteringEnabled;

    private Integer defaultMaxFeatures;

    private Long scrollSize;

    private boolean scrollEnabled;

    private Integer scrollTime;

    private ArrayEncoding arrayEncoding;

    private Long gridSize;

    private Double gridThreshold;

    public enum ArrayEncoding {

        /**
         * Return all arrays without encoding.
         */
        JSON,

        /**
         * URL encode and join string array elements.
         */
        CSV
    }

    /** Convenience constructor building a plain HTTP REST client for the given host/port. */
    public ElasticDataStore(String searchHost, Integer hostPort, String indexName) throws IOException {
        this(RestClient.builder(new HttpHost(searchHost, hostPort, "http")).build(), indexName);
    }

    public ElasticDataStore(RestClient restClient, String indexName) throws IOException {
        this(restClient, null, indexName, false);
    }

    /**
     * Creates the data store, validating the supplied REST client(s) with a
     * ping and discovering the index's type names.
     *
     * @throws IOException if the REST client cannot be validated or created
     */
    public ElasticDataStore(RestClient restClient, RestClient proxyRestClient, String indexName, boolean enableRunAs) throws IOException {
        LOGGER.fine("Initializing data store for " + indexName);
        this.indexName = indexName;
        try {
            checkRestClient(restClient);
            if (proxyRestClient != null) {
                checkRestClient(proxyRestClient);
            }
            client = new RestElasticClient(restClient, proxyRestClient, enableRunAs);
        } catch (Exception e) {
            throw new IOException("Unable to create REST client", e);
        }
        LOGGER.fine("Created REST client: " + client);

        final List types = getClient().getTypes(indexName);
        if (!types.isEmpty()) {
            baseTypeNames = types.stream().map(NameImpl::new).collect(Collectors.toList());
        } else {
            baseTypeNames = new ArrayList<>();
        }

        layerConfigurations = new ConcurrentHashMap<>();
        docTypes = new HashMap<>();
        arrayEncoding = ArrayEncoding.JSON;
    }

    /** Combines index-discovered type names with configured doc type names. */
    @Override
    protected List createTypeNames() {
        final List names = new ArrayList<>();
        names.addAll(baseTypeNames);
        names.addAll(docTypes.keySet());
        return names;
    }

    @Override
    protected ContentFeatureSource createFeatureSource(ContentEntry entry) throws IOException {
        return new ElasticFeatureSource(entry, Query.ALL);
    }

    /**
     * Registers any layer configuration doc type for the name, then flushes
     * the entry state so the schema is rebuilt.
     */
    @Override
    public ContentFeatureSource getFeatureSource(Name name, Transaction tx) throws IOException {
        final ElasticLayerConfiguration layerConfig = layerConfigurations.get(name.getLocalPart());
        if (layerConfig != null) {
            docTypes.put(name, layerConfig.getDocType());
        }
        final ContentFeatureSource featureSource = super.getFeatureSource(name, tx);
        featureSource.getEntry().getState(Transaction.AUTO_COMMIT).flush();
        return featureSource;
    }

    /**
     * Returns the attributes for the layer, either from a stored layer
     * configuration or derived from the index mapping. Derived attributes
     * include the metadata pseudo-fields (_id, _index, ...), mark the first
     * geometry as default, and disambiguate duplicate short names by falling
     * back to full names.
     */
    public List getElasticAttributes(Name layerName) throws IOException {
        final String localPart = layerName.getLocalPart();
        final ElasticLayerConfiguration layerConfig = layerConfigurations.get(localPart);
        final List elasticAttributes;
        if (layerConfig == null || layerConfig.getAttributes().isEmpty()) {
            final String docType = docTypes.getOrDefault(layerName, localPart);
            final Map mapping = getClient().getMapping(indexName, docType);

            elasticAttributes = new ArrayList<>();
            if (mapping != null) {
                // Metadata pseudo-fields exposed as attributes.
                add(elasticAttributes, "_id", "string", mapping, false);
                add(elasticAttributes, "_index", "string", mapping, false);
                add(elasticAttributes, "_type", "string", mapping, false);
                add(elasticAttributes, "_score", "float", mapping, false);
                add(elasticAttributes, "_relative_score", "float", mapping, false);
                add(elasticAttributes, "_aggregation", "binary", mapping, false);
                walk(elasticAttributes, mapping, "", false, false);

                // add default geometry and short name and count duplicate short names
                final Map counts = new HashMap<>();
                boolean foundGeometry = false;
                for (final ElasticAttribute attribute : elasticAttributes) {
                    if (!foundGeometry && Geometry.class.isAssignableFrom(attribute.getType())) {
                        attribute.setDefaultGeometry(true);
                        foundGeometry = true;
                    }
                    final String[] parts = attribute.getName().split("\\.");
                    attribute.setShortName(parts[parts.length-1]);
                    final int count;
                    if (counts.containsKey(attribute.getShortName())) {
                        count = counts.get(attribute.getShortName())+1;
                    } else {
                        count = 1;
                    }
                    counts.put(attribute.getShortName(), count);
                }

                // use full name if short name has duplicates
                for (final ElasticAttribute attribute : elasticAttributes) {
                    if (counts.get(attribute.getShortName()) > 1) {
                        attribute.setShortName(attribute.getName());
                    }
                }
            }
        } else {
            elasticAttributes = layerConfig.getAttributes();
        }
        return elasticAttributes;
    }

    String getIndexName() {
        return indexName;
    }

    ElasticClient getClient() {
        return client;
    }

    boolean isSourceFilteringEnabled() {
        return sourceFilteringEnabled;
    }

    public void setSourceFilteringEnabled(boolean sourceFilteringEnabled) {
        this.sourceFilteringEnabled = sourceFilteringEnabled;
    }

    public Integer getDefaultMaxFeatures() {
        return defaultMaxFeatures;
    }

    public void setDefaultMaxFeatures(Integer defaultMaxFeatures) {
        this.defaultMaxFeatures = defaultMaxFeatures;
    }

    public Long getScrollSize() {
        return scrollSize;
    }

    public Boolean getScrollEnabled() {
        return scrollEnabled;
    }

    public Integer getScrollTime() {
        return scrollTime;
    }

    public void setScrollSize(Long scrollSize) {
        this.scrollSize = scrollSize;
    }

    public void setScrollEnabled(Boolean scrollEnabled) {
        this.scrollEnabled = scrollEnabled;
    }

    public void setScrollTime(Integer scrollTime) {
        this.scrollTime = scrollTime;
    }

    public ArrayEncoding getArrayEncoding() {
        return arrayEncoding;
    }

    public void setArrayEncoding(ArrayEncoding arrayEncoding) {
        this.arrayEncoding = arrayEncoding;
    }

    public Long getGridSize() {
        return gridSize;
    }

    public void setGridSize(Long gridSize) {
        this.gridSize = gridSize;
    }

    public Double getGridThreshold() {
        return gridThreshold;
    }

    public void setGridThreshold(Double gridThreshold) {
        this.gridThreshold = gridThreshold;
    }

    public Map getLayerConfigurations() {
        return layerConfigurations;
    }

    public void setLayerConfiguration(ElasticLayerConfiguration layerConfig) {
        final String layerName = layerConfig.getLayerName();
        this.layerConfigurations.put(layerName, layerConfig);
    }

    public Map getDocTypes() {
        return docTypes;
    }

    /** Doc type for the given type name, defaulting to the local part. */
    public String getDocType(Name typeName) {
        final String docType;
        if (docTypes.containsKey(typeName)) {
            docType = docTypes.get(typeName);
        } else {
            docType = typeName.getLocalPart();
        }
        return docType;
    }

    /**
     * Recursively walks the index mapping, adding an attribute for each leaf
     * field. Tracks the dotted property path, whether we are inside a type's
     * "properties" map, and whether the field is under a nested object.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private void walk(List elasticAttributes, Map map, String propertyKey, boolean startType, boolean nested) {
        for (final Map.Entry entry : map.entrySet()) {
            final String key = entry.getKey();
            final Object value = entry.getValue();
            if (!key.equals("_timestamp") && Map.class.isAssignableFrom(value.getClass())) {
                // "properties" keys do not contribute to the dotted path.
                final String newPropertyKey;
                if (!startType && key.equals("properties")) {
                    newPropertyKey = propertyKey;
                } else if (propertyKey.isEmpty()) {
                    newPropertyKey = entry.getKey();
                } else {
                    newPropertyKey = propertyKey + "." + key;
                }
                startType = !startType && key.equals("properties");
                if (!nested && map.containsKey("type")) {
                    nested = map.get("type").equals("nested");
                }

                if (ElasticParserUtil.isGeoPointFeature((Map) value)) {
                    // GeoJSON-style geo_point object: expose its coordinates.
                    add(elasticAttributes, propertyKey + ".coordinates", "geo_point", (Map) value, nested);
                } else {
                    walk(elasticAttributes, (Map) value, newPropertyKey, startType, nested);
                }
            } else if (key.equals("type") && !value.equals("nested")) {
                add(elasticAttributes, propertyKey, (String) value, map, nested);
            } else if (key.equals("_timestamp")) {
                add(elasticAttributes, "_timestamp", "date", map, nested);
            }
        }
    }

    /**
     * Adds an attribute for the given property, translating the Elasticsearch
     * field type into a Java binding and capturing geometry/date/stored
     * metadata. Unknown types are skipped.
     */
    private void add(List elasticAttributes, String propertyKey, String propertyType, Map map, boolean nested) {
        if (propertyKey != null) {
            final ElasticAttribute elasticAttribute = new ElasticAttribute(propertyKey);
            final Class binding;
            switch (propertyType) {
            case "geo_point":
                binding = Point.class;
                elasticAttribute.setSrid(4326);
                elasticAttribute.setGeometryType(ElasticGeometryType.GEO_POINT);
                break;
            case "geo_shape":
                binding = Geometry.class;
                elasticAttribute.setSrid(4326);
                elasticAttribute.setGeometryType(ElasticGeometryType.GEO_SHAPE);
                break;
            case "string":
            case "keyword":
            case "text":
                binding = String.class;
                elasticAttribute.setAnalyzed(isAnalyzed(map));
                break;
            case "integer":
                binding = Integer.class;
                break;
            case "long":
                binding = Long.class;
                break;
            case "float":
                binding = Float.class;
                break;
            case "double":
                binding = Double.class;
                break;
            case "boolean":
                binding = Boolean.class;
                break;
            case "date":
                // Validate the declared format; fall back to
                // date_optional_time if it cannot be parsed.
                String format = (String) map.get("format");
                if (format != null) {
                    try {
                        Joda.forPattern(format);
                    } catch (Exception e) {
                        LOGGER.fine("Unable to parse date format ('" + format + "') for " + propertyKey);
                        format = null;
                    }
                }
                if (format == null) {
                    format = "date_optional_time";
                }
                elasticAttribute.setDateFormat(format);
                binding = Date.class;
                break;
            case "binary":
                binding = byte[].class;
                break;
            default:
                binding = null;
                break;
            }
            if (binding != null) {
                final boolean stored;
                if (map.get("store") != null) {
                    stored = (Boolean) map.get("store");
                } else {
                    stored = false;
                }
                elasticAttribute.setStored(stored);
                elasticAttribute.setType(binding);
                elasticAttribute.setNested(nested);
                elasticAttributes.add(elasticAttribute);
            }
        }
    }

    /** Pings the cluster root endpoint, failing on any HTTP error status. */
    private static void checkRestClient(RestClient client) throws IOException {
        final Response response = client.performRequest(new Request("GET", "/"));
        final int status = response.getStatusLine().getStatusCode();
        if (status >= 400) {
            final String reason = response.getStatusLine().getReasonPhrase();
            throw new IOException(String.format("Unexpected response from Elasticsearch: %d %s", status, reason));
        }
    }

    /** A field is considered analyzed when its mapping type is "text". */
    static boolean isAnalyzed(Map map) {
        boolean analyzed = false;
        Object value = map.get("type");
        if (value instanceof String && value.equals("text")) {
            analyzed = true;
        }
        return analyzed;
    }

}


================================================
FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticDataStoreFactory.java
================================================
/*
 * This file is hereby placed into the Public
Domain. This means anyone is
 * free to do whatever they wish with this file.
 */

package mil.nga.giat.data.elasticsearch;

import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.ssl.SSLContextBuilder;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.geotools.data.DataStore;
import org.geotools.data.DataStoreFactorySpi;
import mil.nga.giat.data.elasticsearch.ElasticDataStore.ArrayEncoding;
import org.geotools.data.Parameter;
import org.geotools.util.logging.Logging;

import java.awt.RenderingHints.Key;
import java.io.IOException;
import java.io.Serializable;
import java.io.UncheckedIOException;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Data store factory that creates {@linkplain ElasticDataStore} instances.
 *
 * NOTE(review): generic parameters (e.g. on Map params and getValue) appear
 * stripped by extraction in this class -- confirm against upstream.
 */
@SuppressWarnings("WeakerAccess")
public class ElasticDataStoreFactory implements DataStoreFactorySpi {

    /** The logger for this class */
    private final static Logger LOGGER = Logging.getLogger(ElasticDataStoreFactory.class);

    /**
     * System property name for flag indicating whether to force use of authenticated GeoServer user when submitting requests.
     */
    private final static String FORCE_RUNAS_PROPERTY = "org.geoserver.elasticsearch.xpack.force-runas";

    /** Counter of HTTP threads we generate */
    static final AtomicInteger httpThreads = new AtomicInteger(1);

    public static final String DISPLAY_NAME = "Elasticsearch";

    public static final String DESCRIPTION = "Elasticsearch Index";

    /** Cluster hostnames. **/
    public static final Param HOSTNAME = new Param("elasticsearch_host", String.class,
            "Host(s) with optional HTTP scheme and port.", true, "localhost");

    /** Cluster client port. **/
    public static final Param HOSTPORT = new Param("elasticsearch_port", Integer.class,
            "Default HTTP port. Ignored if the host includes the port.", true, 9200);

    /** Index name. **/
    public static final Param INDEX_NAME = new Param("index_name", String.class,
            "Index defining type (supports wildcard)", true);

    /** Username. */
    public static final Param USER = new Param("user", String.class,
            "Elasticsearch user", isForceRunas());

    /** Password. */
    public static final Param PASSWD = new Param("passwd", String.class,
            "Elasticsearch password", isForceRunas(), null,
            Collections.singletonMap(Parameter.IS_PASSWORD, Boolean.TRUE));

    public static final Param RUNAS_GEOSERVER_USER = new Param("runas_geoserver_user", Boolean.class,
            "Flag indicating whether to submit document search requests on behalf of the authenticated GeoServer user",
            false, isForceRunas());

    /** Username. */
    public static final Param PROXY_USER = new Param("proxy_user", String.class,
            "Elasticsearch user for document search requests. If not provided the default user is used for all requests.",
            isForceRunas());

    /** Password. */
    public static final Param PROXY_PASSWD = new Param("proxy_passwd", String.class,
            "Elasticsearch proxy user password.", isForceRunas(),null,
            Collections.singletonMap(Parameter.IS_PASSWORD, Boolean.TRUE));

    public static final Param SSL_REJECT_UNAUTHORIZED = new Param("ssl_reject_unauthorized", Boolean.class,
            "Whether to validate the server certificate during the SSL handshake for https connections",
            false, true);

    public static final Param SOURCE_FILTERING_ENABLED = new Param("source_filtering_enabled", Boolean.class,
            "Enable source field filtering", false, false);

    public static final Param SCROLL_ENABLED = new Param("scroll_enabled", Boolean.class,
            "Use scan search type instead of dfs_query_then_fetch", false, false);

    public static final Param SCROLL_SIZE = new Param("scroll_size", Long.class,
            "Scroll size (ignored if scroll_enabled=false)", false, 20);

    public static final Param SCROLL_TIME_SECONDS = new Param("scroll_time", Integer.class,
            "Time to keep the scroll open in seconds (ignored if scroll_enabled=false)", false, 120);

    public static final Param DEFAULT_MAX_FEATURES = new Param("default_max_features", Integer.class,
            "Default max features", false, 100);

    public static final Param ARRAY_ENCODING = new Param("array_encoding", String.class,
            "Array encoding strategy. Allowed values are \"JSON\" (keep arrays) " +
            " and \"CSV\" (URL encode and join array elements).", false, "JSON");

    public static final Param GRID_SIZE = new Param("grid_size", Long.class,
            "Hint for Geohash grid size (nrow*ncol)", false, 10000L);

    public static final Param GRID_THRESHOLD = new Param("grid_threshold", Double.class,
            "Geohash grid aggregation precision will be the minimum necessary to satisfy actual_grid_size/grid_size>grid_threshold",
            false, 0.05);

    public static final Param[] PARAMS = {
            HOSTNAME, HOSTPORT, INDEX_NAME, USER, PASSWD, RUNAS_GEOSERVER_USER, PROXY_USER, PROXY_PASSWD,
            SSL_REJECT_UNAUTHORIZED, SOURCE_FILTERING_ENABLED, SCROLL_ENABLED, SCROLL_SIZE, SCROLL_TIME_SECONDS,
            DEFAULT_MAX_FEATURES, ARRAY_ENCODING, GRID_SIZE, GRID_THRESHOLD
    };

    @Override
    public String getDisplayName() {
        return DISPLAY_NAME;
    }

    @Override
    public String getDescription() {
        return DESCRIPTION;
    }

    @Override
    public Param[] getParametersInfo() {
        return PARAMS;
    }

    /** The factory can process the params when host, port and index name are all present. */
    @Override
    public boolean canProcess(Map params) {
        boolean result = false;
        try {
            final String searchHost = (String) HOSTNAME.lookUp(params);
            final String indexName = (String) INDEX_NAME.lookUp(params);
            final Integer hostport = (Integer) HOSTPORT.lookUp(params);
            if (searchHost != null && hostport != null && indexName != null) {
                result = true;
            }
        } catch (IOException e) {
            // ignore
        }
        return result;
    }

    @Override
    public boolean isAvailable() {
        return true;
    }

    @Override
    public Map getImplementationHints() {
        return null;
    }

    /**
     * Creates the data store, building a separate proxy REST client when a
     * proxy user is configured.
     */
    @Override
    public DataStore createDataStore(Map params) throws IOException {
        final String user = getValue(USER, params);
        final String passwd = getValue(PASSWD, params);
        final String proxyUser = getValue(PROXY_USER, params);
        final String proxyPasswd = getValue(PROXY_PASSWD, params);

        final RestClient client = createRestClient(params, user, passwd);
        final RestClient proxyClient = proxyUser != null ? createRestClient(params, proxyUser, proxyPasswd) : null;
        return createDataStore(client, proxyClient, params);
    }

    /**
     * Creates and configures the data store from pre-built REST clients.
     * Rejects configurations where run-as is forced via system property but
     * disabled in the params.
     */
    public DataStore createDataStore(RestClient client, RestClient proxyClient, Map params) throws IOException {
        final String indexName = (String) INDEX_NAME.lookUp(params);
        final String arrayEncoding = getValue(ARRAY_ENCODING, params);
        final boolean runAsGeoServerUser = getValue(RUNAS_GEOSERVER_USER, params);
        if (isForceRunas() && !runAsGeoServerUser) {
            throw new IllegalArgumentException(RUNAS_GEOSERVER_USER.key + " is disabled but "
                    + FORCE_RUNAS_PROPERTY + " is set. " + "Enable " + RUNAS_GEOSERVER_USER.key
                    + " or unset " + FORCE_RUNAS_PROPERTY + " in the system environment.");
        }

        final ElasticDataStore dataStore = new ElasticDataStore(client, proxyClient, indexName, runAsGeoServerUser);
        dataStore.setDefaultMaxFeatures(getValue(DEFAULT_MAX_FEATURES, params));
        dataStore.setSourceFilteringEnabled(getValue(SOURCE_FILTERING_ENABLED, params));
        dataStore.setScrollEnabled(getValue(SCROLL_ENABLED, params));
        dataStore.setScrollSize(((Number)getValue(SCROLL_SIZE, params)).longValue());
        dataStore.setScrollTime(getValue(SCROLL_TIME_SECONDS, params));
        dataStore.setArrayEncoding(ArrayEncoding.valueOf(arrayEncoding.toUpperCase()));
        dataStore.setGridSize((Long) GRID_SIZE.lookUp(params));
        dataStore.setGridThreshold((Double) GRID_THRESHOLD.lookUp(params));
        return dataStore;
    }

    public RestClient createRestClient(Map params) throws IOException {
        return createRestClient(params, null, null);
    }

    /**
     * Builds a REST client for the configured hosts, optionally with basic
     * auth credentials and relaxed SSL validation. Hosts may include scheme
     * and port; otherwise http and the default port are used.
     */
    private RestClient createRestClient(Map params, String user, String password) throws IOException {
        final String hostName = getValue(HOSTNAME, params);
        final String[] hosts = hostName.split(",");
        final Integer defaultPort = getValue(HOSTPORT, params);
        final Boolean sslRejectUnauthorized = getValue(SSL_REJECT_UNAUTHORIZED, params);
        final String adminUser = getValue(USER, params);
        // Label used only for logging/thread naming.
        final String type = user == null || adminUser == null || user.equals(adminUser) ? "ADMIN" : "PROXY_USER";

        // NOTE(review): the named capturing groups look garbled by extraction;
        // upstream this is likely "(?<scheme>https?)?(://)?(?<host>[^:]+):?(?<port>\\d+)?"
        // since matcher.group("scheme"/"host"/"port") is used below -- confirm.
        final Pattern pattern = Pattern.compile("(?https?)?(://)?(?[^:]+):?(?\\d+)?");
        final HttpHost[] httpHosts = new HttpHost[hosts.length];
        final AuthScope[] auths = new AuthScope[hosts.length];
        for (int index=0; index < hosts.length; index++) {
            final Matcher matcher = pattern.matcher(hosts[index].trim());
            if (matcher.find()) {
                final String scheme = matcher.group("scheme") != null ? matcher.group("scheme") : "http";
                final String host = matcher.group("host");
                final Integer port = matcher.group("port") != null ? Integer.valueOf(matcher.group("port")) : defaultPort;
                httpHosts[index] = new HttpHost(host, port, scheme);
                auths[index] = new AuthScope(host, port);
            } else {
                throw new IOException("Unable to parse host");
            }
        }

        final RestClientBuilder builder = createClientBuilder(httpHosts);

        if (user != null) {
            builder.setRequestConfigCallback((b) -> {
                LOGGER.finest(String.format("Calling %s setRequestConfigCallback", type));
                return b.setAuthenticationEnabled(true);
            });
        }

        builder.setHttpClientConfigCallback((httpClientBuilder) -> {
            LOGGER.finest(String.format("Calling %s customizeHttpClient", type));
            // Daemon threads so the JVM can exit without waiting on the client.
            httpClientBuilder.setThreadFactory((run) -> {
                final Thread thread = new Thread(run);
                thread.setDaemon(true);
                thread.setName(String.format("esrest-asynchttp-%s-%d", type, httpThreads.getAndIncrement()));
                return thread;
            });
            httpClientBuilder.useSystemProperties();
            if (!sslRejectUnauthorized) {
                // Trust-all SSL: disable hostname and certificate validation.
                httpClientBuilder.setSSLHostnameVerifier((host,session) -> true);
                try {
                    httpClientBuilder.setSSLContext(SSLContextBuilder.create().loadTrustMaterial((chain,authType) ->true).build());
                } catch (KeyManagementException | NoSuchAlgorithmException | KeyStoreException e) {
                    throw new UncheckedIOException(new IOException("Unable to create SSLContext", e));
                }
            }
            if (user != null) {
                final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
                final Credentials credentials = new org.apache.http.auth.UsernamePasswordCredentials(user, password);
                for (AuthScope scope : auths) {
                    credentialsProvider.setCredentials(scope, credentials);
                }
                httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
            }
            return httpClientBuilder;
        });

        LOGGER.fine(String.format("Building a %s RestClient for %s @ %s:%d", type, user, hostName, defaultPort));
        return builder.build();
    }

    @Override
    public DataStore createNewDataStore(Map params) {
        return null;
    }

    public RestClientBuilder createClientBuilder(HttpHost[] hosts) {
        return RestClient.builder(hosts);
    }

    private static boolean isForceRunas() {
        return System.getProperty(FORCE_RUNAS_PROPERTY) != null;
    }

    /** Looks up the param value, falling back to the param's sample/default. */
    @SuppressWarnings({"unchecked" })
    static T getValue(Param param, Map params) throws IOException {
        final Object value;
        if (param.lookUp(params) != null) {
            value = param.lookUp(params);
        } else {
            value = param.sample;
        }
        return (T) value;
    }

}


================================================
FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticFeatureReader.java
================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */

package mil.nga.giat.data.elasticsearch;

import com.fasterxml.jackson.databind.ObjectMapper;

import mil.nga.giat.data.elasticsearch.ElasticDataStore.ArrayEncoding;
import mil.nga.giat.shaded.es.common.joda.Joda;
import mil.nga.giat.shaded.joda.time.format.DateTimeFormatter;

import static mil.nga.giat.data.elasticsearch.ElasticConstants.DATE_FORMAT;
import static mil.nga.giat.data.elasticsearch.ElasticConstants.FULL_NAME;

import org.geotools.data.FeatureReader;
import org.geotools.data.store.ContentState;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.util.logging.Logging;
import org.locationtech.jts.geom.Geometry;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;

import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;

/**
 * FeatureReader access to the Elasticsearch index.
 *
 * NOTE(review): generic parameters (FeatureReader, Iterator, List, Map) appear
 * stripped by extraction throughout this class -- confirm against upstream.
 */
class ElasticFeatureReader implements FeatureReader {

    private final static Logger LOGGER = Logging.getLogger(ElasticFeatureReader.class);

    private final ContentState state;

    private final SimpleFeatureType featureType;

    // Max score of the response, used to compute _relative_score.
    private final float maxScore;

    private final ObjectMapper mapper;

    private final ArrayEncoding arrayEncoding;

    private SimpleFeatureBuilder builder;

    private Iterator searchHitIterator;

    private Iterator> aggregationIterator;

    private final ElasticParserUtil parserUtil;

    public ElasticFeatureReader(ContentState contentState, ElasticResponse response) {
        this(contentState, response.getHits(), response.getAggregations(), response.getMaxScore());
    }

    /**
     * Creates the reader over the given hits and (optionally) the buckets of
     * the first aggregation in the response.
     */
    public ElasticFeatureReader(ContentState contentState, List hits, Map aggregations, float maxScore) {
        this.state = contentState;
        this.featureType = state.getFeatureType();
        this.searchHitIterator = hits.iterator();
        this.builder = new SimpleFeatureBuilder(featureType);
        this.parserUtil = new ElasticParserUtil();
        this.maxScore = maxScore;

        this.aggregationIterator = Collections.emptyIterator();
        if (aggregations != null && !aggregations.isEmpty()) {
            // Only the first aggregation is exposed.
            String aggregationName = aggregations.keySet().stream().findFirst().orElse(null);
            if (aggregations.size() > 1) {
                LOGGER.info("Result has multiple aggregations. Using " + aggregationName);
            }
            if (aggregations.get(aggregationName).getBuckets() != null) {
                this.aggregationIterator = aggregations.get(aggregationName).getBuckets().iterator();
            }
        }

        // Fall back to the factory default when there is no backing data store
        // to read the array encoding from.
        if (contentState.getEntry() != null && contentState.getEntry().getDataStore() != null) {
            final ElasticDataStore dataStore;
            dataStore = (ElasticDataStore) contentState.getEntry().getDataStore();
            this.arrayEncoding = dataStore.getArrayEncoding();
        } else {
            this.arrayEncoding = ArrayEncoding.valueOf((String) ElasticDataStoreFactory.ARRAY_ENCODING.getDefaultValue());
        }
        this.mapper = new ObjectMapper();
    }

    @Override
    public SimpleFeatureType getFeatureType() {
        return this.featureType;
    }

    /** Returns the next hit-backed feature, or an aggregation bucket feature (null id) when hits are exhausted. */
    @Override
    public SimpleFeature next() {
        final String id;
        if (searchHitIterator.hasNext()) {
            id = nextHit();
        } else {
            nextAggregation();
            id = null;
        }
        return builder.buildFeature(id);
    }

    /**
     * Populates the feature builder from the next search hit: metadata
     * pseudo-fields, geometries, dates and plain values, reading from stored
     * fields first and the _source document otherwise. Returns the feature id.
     */
    private String nextHit() {
        final ElasticHit hit = searchHitIterator.next();
        final SimpleFeatureType type = getFeatureType();
        final Map source = hit.getSource();

        final Float score;
        final Float relativeScore;
        if (hit.getScore() != null && !Float.isNaN(hit.getScore()) && maxScore>0) {
            score = hit.getScore();
            relativeScore = score / maxScore;
        } else {
            score = null;
            relativeScore = null;
        }

        for (final AttributeDescriptor descriptor : type.getAttributeDescriptors()) {
            final String name = descriptor.getType().getName().getLocalPart();
            final String sourceName = (String) descriptor.getUserData().get(FULL_NAME);

            List values = hit.field(sourceName);
            if (values == null && source != null) {
                // read field from source
                values = parserUtil.readField(source, sourceName);
            }

            if (values == null && sourceName.equals("_id")) {
                builder.set(name, hit.getId());
            } else if (values == null && sourceName.equals("_index")) {
                builder.set(name, hit.getIndex());
            } else if (values == null && sourceName.equals("_type")) {
                builder.set(name, hit.getType());
            } else if (values == null && sourceName.equals("_score")) {
                builder.set(name, score);
            } else if (values == null && sourceName.equals("_relative_score")) {
                builder.set(name, relativeScore);
            } else if (values != null && Geometry.class.isAssignableFrom(descriptor.getType().getBinding())) {
                if (values.size() == 1) {
                    builder.set(name, parserUtil.createGeometry(values.get(0)));
                } else {
                    builder.set(name, parserUtil.createGeometry(values));
                }
            } else if (values != null && Date.class.isAssignableFrom(descriptor.getType().getBinding())) {
                // Numeric values are epoch millis; strings are parsed with the
                // mapping's declared date format.
                Object dataVal = values.get(0);
                if (dataVal instanceof Double) {
                    builder.set(name, new Date(Math.round((Double) dataVal)));
                } else if (dataVal instanceof Integer) {
                    builder.set(name, new Date((Integer) dataVal));
                } else if (dataVal instanceof Long) {
                    builder.set(name, new Date((long) dataVal));
                } else {
                    final String format = (String) descriptor.getUserData().get(DATE_FORMAT);
                    final DateTimeFormatter dateFormatter = Joda.forPattern(format).parser();
                    Date date = dateFormatter.parseDateTime((String) dataVal).toDate();
                    builder.set(name, date);
                }
            } else if (values != null && values.size() == 1) {
                builder.set(name, values.get(0));
            } else if (values != null && !name.equals("_aggregation")) {
                final Object value;
                if (arrayEncoding == ArrayEncoding.CSV) {
                    // only include first array element when using CSV array encoding
                    value = values.get(0);
                } else {
                    value = values;
                }
                builder.set(name, value);
            }
        }
        return state.getEntry().getTypeName() + "." + hit.getId();
    }

    /** Serializes the next aggregation bucket to JSON bytes in the _aggregation attribute. */
    private void nextAggregation() {
        final Map aggregation = aggregationIterator.next();
        try {
            final byte[] data = mapper.writeValueAsBytes(aggregation);
            builder.set("_aggregation", data);
        } catch (IOException e) {
            LOGGER.warning("Unable to set aggregation. Try reloading layer.");
        }
    }

    @Override
    public boolean hasNext() {
        return searchHitIterator.hasNext() || aggregationIterator.hasNext();
    }

    @Override
    public void close() {
        builder = null;
        searchHitIterator = null;
    }

}


================================================
FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticFeatureReaderScroll.java
================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */

package mil.nga.giat.data.elasticsearch;

import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.logging.Logger;

import org.geotools.data.FeatureReader;
import org.geotools.data.store.ContentState;
import org.geotools.util.logging.Logging;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

/**
 * FeatureReader that pages through an Elasticsearch scroll, delegating each
 * page to an {@link ElasticFeatureReader} and tracking scroll ids for cleanup.
 */
class ElasticFeatureReaderScroll implements FeatureReader {

    private final static Logger LOGGER = Logging.getLogger(ElasticFeatureReaderScroll.class);

    private final ContentState contentState;

    // Hard cap on the number of features returned across all pages.
    private final int maxFeatures;

    private String nextScrollId;

    private ElasticFeatureReader delegate;

    private int numFeatures;

    private boolean lastScroll;

    private final Set scrollIds;

    public ElasticFeatureReaderScroll(ContentState contentState, ElasticResponse searchResponse, int maxFeatures) {
        this.contentState = contentState;
        this.maxFeatures = maxFeatures;
        this.numFeatures = 0;
        this.scrollIds = new HashSet<>();
        processResponse(searchResponse);
    }

    /** Fetches the next scroll page from the data store's client. */
    private void advanceScroll() throws IOException {
        final ElasticDataStore dataStore;
        dataStore = (ElasticDataStore) contentState.getEntry().getDataStore();
        processResponse(dataStore.getClient().scroll(nextScrollId, dataStore.getScrollTime()));
    }

    // NOTE(review): method continues beyond this extract; body is incomplete here.
    private void processResponse(ElasticResponse searchResponse) {
        final int numHits = searchResponse.getNumHits();
        final List hits;
if (numFeatures+numHits <= maxFeatures) { hits = searchResponse.getResults().getHits(); } else { final int n = maxFeatures-numFeatures; hits = searchResponse.getResults().getHits().subList(0,n); } delegate = new ElasticFeatureReader(contentState, hits, searchResponse.getAggregations(), 0); nextScrollId = searchResponse.getScrollId(); lastScroll = numHits == 0 || numFeatures+hits.size()>=maxFeatures; LOGGER.fine("Scoll numHits=" + hits.size() + " (total=" + numFeatures+hits.size()); scrollIds.add(nextScrollId); } @Override public SimpleFeatureType getFeatureType() { return delegate.getFeatureType(); } @Override public SimpleFeature next() throws IOException { final SimpleFeature feature; if (hasNext()) { numFeatures++; feature = delegate.next(); } else { throw new NoSuchElementException(); } return feature; } @Override public boolean hasNext() throws IOException { if (!delegate.hasNext() && !lastScroll) { advanceScroll(); } return (delegate.hasNext() || !lastScroll) && numFeatures attributes = dataStore.getElasticAttributes(entry.getName()); final ElasticLayerConfiguration config = new ElasticLayerConfiguration(entry.getName().getLocalPart()); config.getAttributes().addAll(attributes); dataStore.setLayerConfiguration(config); } } /** * Access parent datastore */ public ElasticDataStore getDataStore() { return (ElasticDataStore) super.getDataStore(); } /** * Implementation that generates the total bounds */ @Override protected ReferencedEnvelope getBoundsInternal(Query query) throws IOException { LOGGER.fine("getBoundsInternal"); final CoordinateReferenceSystem crs = getSchema().getCoordinateReferenceSystem(); final ReferencedEnvelope bounds = new ReferencedEnvelope(crs); try (FeatureReader featureReader = getReaderInternal(query)) { while (featureReader.hasNext()) { final SimpleFeature feature = featureReader.next(); bounds.include(feature.getBounds()); } } return bounds; } @Override protected int getCountInternal(Query query) throws IOException { 
LOGGER.fine("getCountInternal"); int hits = 0; final ElasticRequest searchRequest = prepareSearchRequest(query, false); try { if (!filterFullySupported) { try (FeatureReader reader = getReaderInternal(query)) { while (reader.hasNext()) { reader.next(); hits++; } } } else { searchRequest.setSize(0); final ElasticDataStore dataStore = getDataStore(); final String docType = dataStore.getDocType(entry.getName()); final ElasticResponse sr = dataStore.getClient().search(dataStore.getIndexName(), docType, searchRequest); final int totalHits = (int) sr.getTotalNumHits(); final int size = getSize(query); final int from = getStartIndex(query); hits = Math.max(0, Math.min(totalHits - from, size)); } } catch (Exception e) { LOGGER.log(Level.SEVERE, e.getMessage(), e); throw new IOException("Error executing count search", e); } return hits; } @Override protected FeatureReader getReaderInternal(Query query) throws IOException { LOGGER.fine("getReaderInternal"); FeatureReader reader; try { final ElasticDataStore dataStore = getDataStore(); final String docType = dataStore.getDocType(entry.getName()); final boolean scroll = !useSortOrPagination(query) && dataStore.getScrollEnabled(); final ElasticRequest searchRequest = prepareSearchRequest(query, scroll); final ElasticResponse sr = dataStore.getClient().search(dataStore.getIndexName(), docType, searchRequest); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Search response: " + sr); } if (!scroll) { reader = new ElasticFeatureReader(getState(), sr); } else { reader = new ElasticFeatureReaderScroll(getState(), sr, getSize(query)); } if (!filterFullySupported) { reader = new FilteringFeatureReader<>(reader, query.getFilter()); } } catch (Exception e) { LOGGER.log(Level.SEVERE, e.getMessage(), e); throw new IOException("Error executing query search", e); } return reader; } private ElasticRequest prepareSearchRequest(Query query, boolean scroll) throws IOException { String naturalSortOrder = 
SortOrder.ASCENDING.toSQL().toLowerCase(); final ElasticRequest searchRequest = new ElasticRequest(); final ElasticDataStore dataStore = getDataStore(); final String docType = dataStore.getDocType(entry.getName()); LOGGER.fine("Preparing " + docType + " (" + entry.getName() + ") query"); if (!scroll) { if (query.getSortBy()!=null){ for (final SortBy sort : query.getSortBy()) { final String sortOrder = sort.getSortOrder().toSQL().toLowerCase(); if (sort.getPropertyName() != null) { final String name = sort.getPropertyName().getPropertyName(); searchRequest.addSort(name, sortOrder); } else { naturalSortOrder = sortOrder; } } } // pagination searchRequest.setSize(getSize(query)); searchRequest.setFrom(getStartIndex(query)); } else { if (dataStore.getScrollSize() != null) { searchRequest.setSize(dataStore.getScrollSize().intValue()); } if (dataStore.getScrollTime() != null) { searchRequest.setScroll(dataStore.getScrollTime()); } } if (dataStore.isSourceFilteringEnabled()) { if (query.getProperties() != Query.ALL_PROPERTIES) { for (String property : query.getPropertyNames()) { searchRequest.addSourceInclude(property); } } else { // add source includes setSourceIncludes(searchRequest); } } // add query and post filter final FilterToElastic filterToElastic = new FilterToElastic(); filterToElastic.setFeatureType(buildFeatureType()); filterToElastic.encode(query); filterFullySupported = filterToElastic.getFullySupported(); if (!filterFullySupported) { LOGGER.fine("Filter is not fully supported by native Elasticsearch." + " Additional post-query filtering will be performed."); } final Map queryBuilder = filterToElastic.getQueryBuilder(); final Map nativeQueryBuilder = filterToElastic.getNativeQueryBuilder(); searchRequest.setQuery(queryBuilder); if (isSort(query) && nativeQueryBuilder.equals(ElasticConstants.MATCH_ALL)) { final String sortKey = dataStore.getClient().getVersion() < 7 ? 
"_uid" : "_id"; searchRequest.addSort(sortKey, naturalSortOrder); } if (filterToElastic.getAggregations() != null) { final Map>> aggregations = filterToElastic.getAggregations(); final Envelope envelope = (Envelope) query.getFilter().accept(ExtractBoundsFilterVisitor.BOUNDS_VISITOR, null); final long gridSize; if (dataStore.getGridSize() != null) { gridSize = dataStore.getGridSize(); } else { gridSize = (Long) ElasticDataStoreFactory.GRID_SIZE.getDefaultValue(); } final double gridThreshold; if (dataStore.getGridThreshold() != null) { gridThreshold = dataStore.getGridThreshold(); } else { gridThreshold = (Double) ElasticDataStoreFactory.GRID_THRESHOLD.getDefaultValue(); } final int precision = GeohashUtil.computePrecision(envelope, gridSize, gridThreshold); LOGGER.fine("Updating GeoHash grid aggregation precision to " + precision); GeohashUtil.updateGridAggregationPrecision(aggregations, precision); searchRequest.setAggregations(aggregations); searchRequest.setSize(0); } return searchRequest; } private void setSourceIncludes(final ElasticRequest searchRequest) throws IOException { final ElasticDataStore dataStore = getDataStore(); final List attributes = dataStore.getElasticAttributes(entry.getName()); for (final ElasticAttribute attribute : attributes) { if (attribute.isUse() && attribute.isStored()) { searchRequest.addField(attribute.getName()); } else if (attribute.isUse()) { searchRequest.addSourceInclude(attribute.getName()); } } } private boolean isSort(Query query) { return query.getSortBy() != null && query.getSortBy().length > 0; } private boolean useSortOrPagination(Query query) { return (query.getSortBy() != null && query.getSortBy().length > 0) || query.getStartIndex()!=null; } private int getSize(Query query) { final int size; if (!query.isMaxFeaturesUnlimited()) { size = query.getMaxFeatures(); } else { size = getDataStore().getDefaultMaxFeatures(); LOGGER.fine("Unlimited maxFeatures not supported. 
Using default: " + size); } return size; } private int getStartIndex(Query query) { final int from; if (query.getStartIndex() != null) { from = query.getStartIndex(); } else { from = 0; } return from; } @Override protected SimpleFeatureType buildFeatureType() { final ElasticDataStore ds = getDataStore(); final ElasticLayerConfiguration layerConfig; layerConfig = ds.getLayerConfigurations().get(entry.getTypeName()); final List attributes; if (layerConfig != null) { attributes = layerConfig.getAttributes(); } else { attributes = null; } final ElasticFeatureTypeBuilder typeBuilder; typeBuilder = new ElasticFeatureTypeBuilder(attributes, entry.getName()); return typeBuilder.buildFeatureType(); } @Override protected boolean canLimit() { return true; } @Override protected boolean canOffset() { return true; } @Override protected boolean canFilter() { return true; } @Override protected boolean canSort() { return true; } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticFeatureTypeBuilder.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.data.elasticsearch; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import static mil.nga.giat.data.elasticsearch.ElasticConstants.ANALYZED; import static mil.nga.giat.data.elasticsearch.ElasticConstants.DATE_FORMAT; import static mil.nga.giat.data.elasticsearch.ElasticConstants.FULL_NAME; import static mil.nga.giat.data.elasticsearch.ElasticConstants.GEOMETRY_TYPE; import static mil.nga.giat.data.elasticsearch.ElasticConstants.NESTED; import mil.nga.giat.data.elasticsearch.ElasticAttribute.ElasticGeometryType; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.referencing.CRS; import org.geotools.util.logging.Logging; import org.locationtech.jts.geom.Geometry; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.feature.type.Name; /** * Builds a feature type based on the attributes defined in the * {@link ElasticLayerConfiguration}. 
* */ class ElasticFeatureTypeBuilder extends SimpleFeatureTypeBuilder { private final static Logger LOGGER = Logging.getLogger(ElasticFeatureTypeBuilder.class); private final List attributes; public ElasticFeatureTypeBuilder(List attributes, Name name) { setName(name); this.attributes = attributes; } @Override public SimpleFeatureType buildFeatureType() { if (attributes != null) { String defaultGeometryName = null; for (ElasticAttribute attribute : attributes) { if (attribute.isUse()) { final String attributeName; if (attribute.getCustomName() != null) { attributeName = attribute.getCustomName(); } else if (attribute.getUseShortName()) { attributeName = attribute.getShortName(); } else { attributeName = attribute.getName(); } AttributeDescriptor att = null; if (Geometry.class.isAssignableFrom(attribute.getType())) { final Integer srid = attribute.getSrid(); try { if (srid != null) { attributeBuilder.setCRS(CRS.decode("EPSG:" + srid)); attributeBuilder.setName(attributeName); attributeBuilder.setBinding(attribute.getType()); att = attributeBuilder.buildDescriptor(attributeName, attributeBuilder.buildGeometryType()); final ElasticGeometryType geometryType = attribute.getGeometryType(); att.getUserData().put(GEOMETRY_TYPE, geometryType); if (attribute.isDefaultGeometry() != null && attribute.isDefaultGeometry()) { defaultGeometryName = attributeName; } } } catch (Exception e) { String msg = "Error occured determing srid for " + attribute.getName(); LOGGER.log(Level.WARNING, msg, e); } } else { attributeBuilder.setName(attributeName); attributeBuilder.setBinding(attribute.getType()); att = attributeBuilder.buildDescriptor(attributeName, attributeBuilder.buildType()); } if (att != null && attribute.getDateFormat() != null) { att.getUserData().put(DATE_FORMAT, attribute.getDateFormat()); } if (att != null) { att.getUserData().put(FULL_NAME, attribute.getName()); att.getUserData().put(ANALYZED, attribute.getAnalyzed()); att.getUserData().put(NESTED, attribute.isNested()); 
add(att); } } } if (defaultGeometryName != null) { setDefaultGeometry(defaultGeometryName); } } return super.buildFeatureType(); } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticHit.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; import java.util.List; import java.util.Map; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; @SuppressWarnings("unused") @JsonIgnoreProperties(ignoreUnknown=true) class ElasticHit { @JsonProperty("_index") private String index; @JsonProperty("_type") private String type; @JsonProperty("_id") private String id; @JsonProperty("_score") private Float score; @JsonProperty("_source") private Map source; @JsonProperty("fields") private Map> fields; public String getIndex() { return index; } public String getType() { return type; } public String getId() { return id; } public Float getScore() { return score; } public Map getSource() { return source; } public Map> getFields() { return fields; } public List field(String name) { return this.fields != null ? this.fields.get(name) : null; } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticLayerConfiguration.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.data.elasticsearch; import java.io.Serializable; import java.util.ArrayList; import java.util.List; /** * Describes an Elasticsearch layer configuration as set of {@link ElasticAttribute} */ public class ElasticLayerConfiguration implements Serializable { private static final long serialVersionUID = 1838874365349725912L; /** * Key to identify the Elasticsearch layer configuration. */ public static final String KEY = "ElasticLayerConfiguration"; private final String docType; private String layerName; private final List attributes; public ElasticLayerConfiguration(String docType) { this.docType = docType; this.layerName = docType; this.attributes = new ArrayList<>(); } public ElasticLayerConfiguration(ElasticLayerConfiguration other) { this(other.docType); setLayerName(other.layerName); for (final ElasticAttribute attribute : other.attributes) { attributes.add(new ElasticAttribute(attribute)); } } public String getDocType() { return docType; } public String getLayerName() { return layerName; } public void setLayerName(String layerName) { this.layerName = layerName; } public List getAttributes() { return attributes; } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticMappings.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.data.elasticsearch; import java.util.Map; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @SuppressWarnings("unused") class ElasticMappings { private Map mappings; public Map getMappings() { return mappings; } public void setMappings(Map mappings) { this.mappings = mappings; } @JsonIgnoreProperties(ignoreUnknown=true) public static class Mapping { private Map properties; public Map getProperties() { return properties; } } public static class Untyped { private Mapping mappings; public Mapping getMappings() { return mappings; } } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticParserUtil.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; import java.util.*; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.geotools.util.logging.Logging; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.LinearRing; import org.locationtech.jts.geom.Polygon; import org.locationtech.jts.io.ParseException; import org.locationtech.jts.io.WKTReader; import com.github.davidmoten.geo.GeoHash; import com.github.davidmoten.geo.LatLong; import java.awt.geom.Point2D; import org.geotools.referencing.GeodeticCalculator; import org.geotools.referencing.datum.DefaultEllipsoid; /** * Utilities for parsing Elasticsearch document source and field content to * extract values and create geometries. 
* */ class ElasticParserUtil { private final static Logger LOGGER = Logging.getLogger(ElasticParserUtil.class); private static final Pattern GEO_POINT_PATTERN; static { GEO_POINT_PATTERN = Pattern.compile("\\s*([-+]?\\d*\\.?\\d*)[^-+\\d.]+([-+]?\\d*\\.?\\d*)\\s*"); } private static final Pattern GEO_HASH_PATTERN; static { GEO_HASH_PATTERN = Pattern.compile("[0123456789bcdefghjkmnpqrstuvwxyz]+"); } private static final Pattern ELASTIC_DISTANCE_PATTERN; static { ELASTIC_DISTANCE_PATTERN = Pattern.compile("([0-9]+(\\.[0-9]+)?)([a-zA-Z]*)"); } private static final double CIRCLE_INTERPOLATION_INTERVAL = 500.0; private static final int MAX_CIRCLE_POINTS = 500; private static final int MIN_CIRCLE_POINTS = 40; private static final double MIN_CIRCLE_RADIUS_M = 0.001; private final GeodeticCalculator geodeticCalculator; private static final Pattern WKT_PATTERN; static { WKT_PATTERN = Pattern.compile("POINT.*|LINESTRING.*|POLYGON.*|MULTIPOINT.*|MULTILINESTRING.*|MULTIPOLYGON.*|GEOMETRYCOLLECTION.*"); } private final GeometryFactory geometryFactory; private final WKTReader wktReader; public ElasticParserUtil() { this.geometryFactory = new GeometryFactory(); this.geodeticCalculator = new GeodeticCalculator(DefaultEllipsoid.WGS84); this.wktReader = new WKTReader(); } /** * Create point geometry given geo_point or geo_shape definition. GeoPoint * can be defined by string, geohash, coordinate array or properties map. * GeoShape is defined by properties map. 
* * @param obj GeoPoint or GeoShape definition * @return Geometry */ @SuppressWarnings("unchecked") public Geometry createGeometry(Object obj) { final Geometry geometry; if (obj instanceof String) { // geo_point by string final Matcher listMatcher = GEO_POINT_PATTERN.matcher((String) obj); if (listMatcher.matches()) { // coordinate final double y = Double.valueOf(listMatcher.group(1)); final double x = Double.valueOf(listMatcher.group(2)); geometry = geometryFactory.createPoint(new Coordinate(x, y)); } else if (GEO_HASH_PATTERN.matcher((String) obj).matches()) { // geohash final LatLong latLon = GeoHash.decodeHash((String) obj); final Coordinate geoPoint = new Coordinate(latLon.getLon(), latLon.getLat()); final double lat = geoPoint.y; final double lon = geoPoint.x; geometry = geometryFactory.createPoint(new Coordinate(lon, lat)); } else if (WKT_PATTERN.matcher((String) obj).matches()) { // geoshape wkt Geometry geom; try { geom = wktReader.read((String) obj); } catch (ParseException e) { geom = null; } geometry = geom; } else { geometry = null; } } else if (obj instanceof List && ((List) obj).size() == 2) { // geo_point by coordinate array final List values = (List) obj; if (Number.class.isAssignableFrom(values.get(0).getClass())) { final double x = ((Number) values.get(0)).doubleValue(); final double y = ((Number) values.get(1)).doubleValue(); geometry = geometryFactory.createPoint(new Coordinate(x, y)); } else if (values.get(0) instanceof String) { final double x = Double.valueOf((String) values.get(0)); final double y = Double.valueOf((String) values.get(1)); geometry = geometryFactory.createPoint(new Coordinate(x, y)); } else { geometry = null; } } else if (obj instanceof Map) { // geo_shape or geo_point by properties geometry = createGeometry((Map) obj); } else { geometry = null; } return geometry; } /** * Create geometry given property map defining geo_shape type and * coordinates or geo_point lat and lon. 
* * @param properties Properties * @return Geometry */ @SuppressWarnings({"rawtypes", "unchecked"}) public Geometry createGeometry(final Map properties) { final Geometry geometry; switch (String.valueOf(properties.get("type")).toUpperCase()) { case "POINT": { final List posList; posList = (List) properties.get("coordinates"); final Coordinate coordinate = createCoordinate(posList); geometry = geometryFactory.createPoint(coordinate); break; } case "LINESTRING": { final List> posList; posList = (List) properties.get("coordinates"); final Coordinate[] coordinates = createCoordinates(posList); geometry = geometryFactory.createLineString(coordinates); break; } case "POLYGON": { final List>> posList; posList = (List) properties.get("coordinates"); geometry = createPolygon(posList); break; } case "MULTIPOINT": { final List> posList; posList = (List) properties.get("coordinates"); final Coordinate[] coordinates = createCoordinates(posList); geometry = geometryFactory.createMultiPointFromCoords(coordinates); break; } case "MULTILINESTRING": { final List>> posList; posList = (List) properties.get("coordinates"); final LineString[] lineStrings = new LineString[posList.size()]; for (int i = 0; i < posList.size(); i++) { final Coordinate[] coordinates = createCoordinates(posList.get(i)); lineStrings[i] = geometryFactory.createLineString(coordinates); } geometry = geometryFactory.createMultiLineString(lineStrings); break; } case "MULTIPOLYGON": { final List>>> posList; posList = (List) properties.get("coordinates"); final Polygon[] polygons = new Polygon[posList.size()]; for (int i = 0; i < posList.size(); i++) { polygons[i] = createPolygon(posList.get(i)); } geometry = geometryFactory.createMultiPolygon(polygons); break; } case "GEOMETRYCOLLECTION": { final List> list; list = (List) properties.get("geometries"); final Geometry[] geometries = new Geometry[list.size()]; for (int i = 0; i < geometries.length; i++) { geometries[i] = createGeometry(list.get(i)); } geometry = 
geometryFactory.createGeometryCollection(geometries); break; } case "ENVELOPE": { final List> posList; posList = (List) properties.get("coordinates"); final Coordinate[] coords = createCoordinates(posList); final Envelope envelope = new Envelope(coords[0], coords[1]); geometry = geometryFactory.toGeometry(envelope); break; } case "CIRCLE": { final List posList; posList = (List) properties.get("coordinates"); final String radius = (String) properties.get("radius"); final Coordinate coordinate = createCoordinate(posList); geometry = createCircle(coordinate, radius); break; } default: // check if this is a geo_point final Object latObj = properties.get("lat"); final Object lonObj = properties.get("lon"); if (latObj != null && lonObj != null) { final Double lat; if (latObj instanceof Number) { lat = ((Number) latObj).doubleValue(); } else if (latObj instanceof String) { lat = new Double((String) latObj); } else { lat = null; } final Double lon; if (lonObj instanceof Number) { lon = ((Number) lonObj).doubleValue(); } else if (lonObj instanceof String) { lon = new Double((String) lonObj); } else { lon = null; } if (lat != null && lon != null) { geometry = geometryFactory.createPoint(new Coordinate(lon, lat)); } else { geometry = null; } } else { geometry = null; } break; } return geometry; } private Polygon createPolygon(final List>> posList) { final Coordinate[] shellCoordinates = createCoordinates(posList.get(0)); final LinearRing shell = geometryFactory.createLinearRing(shellCoordinates); final LinearRing[] holes = new LinearRing[posList.size() - 1]; for (int i = 1; i < posList.size(); i++) { final Coordinate[] coordinates = createCoordinates(posList.get(i)); holes[i - 1] = geometryFactory.createLinearRing(coordinates); } return geometryFactory.createPolygon(shell, holes); } private Coordinate[] createCoordinates(final List> posList) { final Coordinate[] coordinates = new Coordinate[posList.size()]; for (int i = 0; i < posList.size(); i++) { coordinates[i] = 
createCoordinate(posList.get(i)); } return coordinates; } private Coordinate createCoordinate(final List posList) { if (posList == null) { return null; } final double x; final double y; if (Number.class.isAssignableFrom(posList.get(0).getClass())) { x = ((Number) posList.get(0)).doubleValue(); y = ((Number) posList.get(1)).doubleValue(); } else { x = Double.valueOf(posList.get(0).toString()); y = Double.valueOf(posList.get(1).toString()); } return new Coordinate(x, y); } /** * Read field from document source. * * @param source Source * @param name Field to extract. * @return List of values or empty list if not found */ public List readField(Map source, String name) { final List keys = Arrays.asList(name.split("\\.")); List values = new ArrayList<>(); if (!keys.isEmpty()) { final Object entry = source.get(keys.get(0)); if (entry == null) { readField(source.get(name), new ArrayList<>(), values); } else { readField(entry, keys.subList(1, keys.size()), values); } } final List result; if (!values.isEmpty()) { result = values; } else { result = null; } return result; } private void readField(Object entry, List keys, List values) { if (entry != null && List.class.isAssignableFrom(entry.getClass())) { for (Object object : (List) entry) { readField(object, keys, values); } } else if (entry != null && !keys.isEmpty() && Map.class.isAssignableFrom(entry.getClass())) { final Object nextEntry = ((Map) entry).get(keys.get(0)); final List newKeys = keys.subList(1, keys.size()); readField(nextEntry, newKeys, values); } else if (entry != null) { values.add(entry); } } @SuppressWarnings("rawtypes") public static boolean isGeoPointFeature(Map map) { boolean result = false; if (map.size() == 2 && map.containsKey("coordinates")) { try { result = "geo_point".equals(((Map) map.get("coordinates")).get("type")); } catch (Exception ignored) { } } return result; } public static String urlDecode(String value) { try { value = URLDecoder.decode(value, StandardCharsets.UTF_8.toString()); } catch 
(UnsupportedEncodingException e) { LOGGER.warning("Unable to encode value(s): " + e); } return value; } /** * Interpolates a JTS polygon from a circle definition. Assumes WGS84 CRS. * * @param centreCoord The centre of the circle * @param radius Consists of a numeric value with a units string appended to * it. * @return A polygon that is an interpolated form of a circle */ private Geometry createCircle(Coordinate centreCoord, String radius) { if (centreCoord == null) { return null; } final double radM; try { radM = convertToMeters(radius); } catch(Exception e) { return null; } // Reject circles with radii below an arbitrary minimum. if (radM < MIN_CIRCLE_RADIUS_M) { return null; } // Interpolate a circle on the surface of the ellipsoid at an arbitrary // interval and then ensure that the number of interpolated points are // within a specified range final double circumferance = radM * 2.0 * Math.PI; int numPoints = (int) (circumferance / CIRCLE_INTERPOLATION_INTERVAL); numPoints = Math.max(MIN_CIRCLE_POINTS, numPoints); numPoints = Math.min(MAX_CIRCLE_POINTS, numPoints); final double angularIncrement = 360.0 / numPoints; geodeticCalculator.setStartingGeographicPoint(centreCoord.x, centreCoord.y); final Coordinate[] linearRingCoords = new Coordinate[numPoints + 1]; double angle = 0.0; for (int i = 0; i < numPoints; i++) { geodeticCalculator.setDirection(angle, radM); Point2D point2D = geodeticCalculator.getDestinationGeographicPoint(); linearRingCoords[i] = new Coordinate(point2D.getX(), point2D.getY()); angle += angularIncrement; } linearRingCoords[numPoints] = linearRingCoords[0]; final LinearRing linearRing = geometryFactory.createLinearRing(linearRingCoords); return geometryFactory.createPolygon(linearRing); } /** * Converts an Elasticsearch distance string consisting of value and unit * into metres. * @param distanceWithUnit String of the form of a decimal number * concatenated with a unit string as defined in * {@link FilterToElasticHelper#UNITS_MAP}. 
If the unit string is missing * then the number is assumed to be metres. * @return distance in metres. * @throws IllegalArgumentException For invalid unit or format */ static double convertToMeters(String distanceWithUnit) throws IllegalArgumentException { if (distanceWithUnit == null || distanceWithUnit.isEmpty()) { throw new IllegalArgumentException("Null of zero length distance string argument"); } final Matcher matcher = ELASTIC_DISTANCE_PATTERN.matcher(distanceWithUnit); if (matcher.matches()) { final double distance = Double.valueOf(matcher.group(1)); final String unit = matcher.group(3); Double conversion = FilterToElasticHelper.UNITS_MAP.get(unit); if (conversion == null) { if (unit != null && ! unit.isEmpty()) { throw new IllegalArgumentException("Illegal unit: " + unit); } else { conversion = 1.0; } } return distance * conversion; } else { throw new IllegalArgumentException("Distance string argument has incorrect format"); } } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticRequest.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.data.elasticsearch; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; class ElasticRequest { private Map query; private Map>> aggregations; private Integer size; private Integer from; private Integer scroll; private final List> sorts; private final List sourceIncludes; private final List fields; public ElasticRequest() { this.sorts = new ArrayList<>(); this.fields = new ArrayList<>(); this.sourceIncludes = new ArrayList<>(); } public Map getQuery() { return query; } public void setQuery(Map query) { this.query = query; } public Map>> getAggregations() { return aggregations; } public void setAggregations(Map>> aggregations) { this.aggregations = aggregations; } public Integer getSize() { return size; } public void setSize(Integer size) { this.size = size; } public Integer getFrom() { return from; } public void setFrom(Integer from) { this.from = from; } public Integer getScroll() { return scroll; } public void setScroll(Integer scroll) { this.scroll = scroll; } public List> getSorts() { return sorts; } public void addSort(String key, String order) { this.sorts.add(Collections.singletonMap(key, Collections.singletonMap("order", order))); } public List getSourceIncludes() { return sourceIncludes; } public void addSourceInclude(String sourceInclude) { this.sourceIncludes.add(sourceInclude); } public List getFields() { return fields; } public void addField(String field) { this.fields.add(field); } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticResponse.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.data.elasticsearch; import java.util.ArrayList; import java.util.List; import java.util.Map; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; @SuppressWarnings("unused") @JsonIgnoreProperties(ignoreUnknown=true) public class ElasticResponse { @JsonProperty("hits") private ElasticResults results; @JsonProperty("aggregations") private Map aggregations; @JsonProperty("_scroll_id") private String scrollId; public ElasticResults getResults() { return results; } public Map getAggregations() { return aggregations; } public String getScrollId() { return scrollId; } @JsonIgnore public List getHits() { final List hits; if (results != null) { hits = results.getHits(); } else { hits = new ArrayList<>(); } return hits; } public int getNumHits() { final int numHits; if (results != null) { numHits = results.getHits().size(); } else { numHits = 0; } return numHits; } public long getTotalNumHits() { final long total; if (results != null && results.getTotal() != null) { total = results.getTotal(); } else { total = 0L; } return total; } public float getMaxScore() { final float maxScore; if (results != null && results.getMaxScore() != null) { maxScore = results.getMaxScore(); } else { maxScore = 0f; } return maxScore; } @Override public String toString() { return "ElasticResponse[total=" + getTotalNumHits() + ", hits=" + getNumHits() + ", aggregations=" + aggregations + ", scrollId=" + scrollId + ", maxScore=" + getMaxScore() + "]"; } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/ElasticResults.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.data.elasticsearch; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import java.util.List; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; @SuppressWarnings("unused") @JsonIgnoreProperties(ignoreUnknown=true) public class ElasticResults { @JsonDeserialize(using = TotalDeserializer.class) private Long total; @JsonProperty("max_score") private Float maxScore; private List hits; public Long getTotal() { return total; } public Float getMaxScore() { return maxScore; } public List getHits() { return hits; } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/FilterToElastic.java ================================================ /* * GeoTools - The Open Source Java GIS Toolkit * http://geotools.org * * (C) 2002-2008, Open Source Geospatial Foundation (OSGeo) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
*/ package mil.nga.giat.data.elasticsearch; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Logger; import static mil.nga.giat.data.elasticsearch.ElasticConstants.ANALYZED; import static mil.nga.giat.data.elasticsearch.ElasticConstants.DATE_FORMAT; import static mil.nga.giat.data.elasticsearch.ElasticConstants.MATCH_ALL; import static mil.nga.giat.data.elasticsearch.ElasticConstants.NESTED; import org.geotools.data.Query; import org.geotools.factory.CommonFactoryFinder; import org.geotools.filter.Capabilities; import org.geotools.geojson.geom.GeometryJSON; import org.geotools.util.ConverterFactory; import org.geotools.util.Converters; import org.geotools.util.factory.Hints; import org.geotools.util.logging.Logging; import org.locationtech.jts.geom.CoordinateSequence; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LinearRing; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.filter.And; import org.opengis.filter.BinaryComparisonOperator; import org.opengis.filter.BinaryLogicOperator; import org.opengis.filter.ExcludeFilter; import org.opengis.filter.Filter; import org.opengis.filter.FilterFactory; import org.opengis.filter.FilterVisitor; import org.opengis.filter.Id; import org.opengis.filter.IncludeFilter; import org.opengis.filter.Not; import org.opengis.filter.Or; import org.opengis.filter.PropertyIsBetween; import org.opengis.filter.PropertyIsEqualTo; import org.opengis.filter.PropertyIsGreaterThan; import org.opengis.filter.PropertyIsGreaterThanOrEqualTo; import org.opengis.filter.PropertyIsLessThan; import org.opengis.filter.PropertyIsLessThanOrEqualTo; import org.opengis.filter.PropertyIsLike; import org.opengis.filter.PropertyIsNil; import 
org.opengis.filter.PropertyIsNotEqualTo; import org.opengis.filter.PropertyIsNull; import org.opengis.filter.expression.Add; import org.opengis.filter.expression.BinaryExpression; import org.opengis.filter.expression.Divide; import org.opengis.filter.expression.Expression; import org.opengis.filter.expression.ExpressionVisitor; import org.opengis.filter.expression.Function; import org.opengis.filter.expression.Literal; import org.opengis.filter.expression.Multiply; import org.opengis.filter.expression.NilExpression; import org.opengis.filter.expression.PropertyName; import org.opengis.filter.expression.Subtract; import org.opengis.filter.identity.Identifier; import org.opengis.filter.spatial.BBOX; import org.opengis.filter.spatial.Beyond; import org.opengis.filter.spatial.BinarySpatialOperator; import org.opengis.filter.spatial.Contains; import org.opengis.filter.spatial.Crosses; import org.opengis.filter.spatial.DWithin; import org.opengis.filter.spatial.Disjoint; import org.opengis.filter.spatial.Equals; import org.opengis.filter.spatial.Intersects; import org.opengis.filter.spatial.Overlaps; import org.opengis.filter.spatial.Touches; import org.opengis.filter.spatial.Within; import org.opengis.filter.temporal.After; import org.opengis.filter.temporal.AnyInteracts; import org.opengis.filter.temporal.Before; import org.opengis.filter.temporal.Begins; import org.opengis.filter.temporal.BegunBy; import org.opengis.filter.temporal.BinaryTemporalOperator; import org.opengis.filter.temporal.During; import org.opengis.filter.temporal.EndedBy; import org.opengis.filter.temporal.Ends; import org.opengis.filter.temporal.Meets; import org.opengis.filter.temporal.MetBy; import org.opengis.filter.temporal.OverlappedBy; import org.opengis.filter.temporal.TContains; import org.opengis.filter.temporal.TEquals; import org.opengis.filter.temporal.TOverlaps; import org.opengis.temporal.Period; import com.fasterxml.jackson.core.type.TypeReference; import 
com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import mil.nga.giat.shaded.es.common.joda.Joda; import mil.nga.giat.shaded.joda.time.format.DateTimeFormatter; /** * Encodes an OGC {@link Filter} and creates a filter for an Elasticsearch query. * Optionally applies SQL View parameters from {@link Query} defining Elasticsearch * query directly. * * Based on org.geotools.data.jdbc.FilterToSQL in the GeoTools library/jdbc module. */ class FilterToElastic implements FilterVisitor, ExpressionVisitor { /** Standard java logger */ static final Logger LOGGER = Logging.getLogger(FilterToElastic.class); /** filter factory */ private static final FilterFactory filterFactory = CommonFactoryFinder.getFilterFactory(null); private static final ObjectMapper mapper = new ObjectMapper(); private static final ObjectReader mapReader = mapper.readerWithView(Map.class).forType(HashMap.class); private static final DateTimeFormatter DEFAULT_DATE_FORMATTER = Joda.forPattern("date_optional_time").printer(); /** The filter types that this class can encode */ private Capabilities capabilities = null; /** the schmema the encoder will use */ SimpleFeatureType featureType; Geometry currentGeometry; Object field; Map currentShapeBuilder; Boolean fullySupported; Map queryBuilder; Map nativeQueryBuilder; Map>> aggregations; private final FilterToElasticHelper helper; private String key; private Object lower; private Boolean nested; private String path; private String op; private Object begin; private Object end; private DateTimeFormatter dateFormatter; public FilterToElastic() { queryBuilder = MATCH_ALL; nativeQueryBuilder = ImmutableMap.of("match_all", Collections.EMPTY_MAP); helper = new FilterToElasticHelper(this); } /** * Performs the encoding. * * @param filter the Filter to be encoded. * * @throws FilterToElasticException If there were io problems. 
*/ public void encode(Filter filter) throws FilterToElasticException { fullySupported = getCapabilities().fullySupports(filter); filter.accept(this, null); } /** * Performs the encoding. * If SQL View parameters are provided in the query hints, they will be used * to define and/or update the query. * * @param query the Query to be encoded. * * @throws FilterToElasticException If there were io problems. */ public void encode(Query query) throws FilterToElasticException { encode(query.getFilter()); addViewParams(query); } /** * Sets the featuretype the encoder is encoding for. *
    * <p>
    * This is used for context for attribute expressions. *
    * </p>
    * * @param featureType Feature tag */ public void setFeatureType(SimpleFeatureType featureType) { this.featureType = featureType; } /** * Sets the capabilities of this filter. * * @return Capabilities for this Filter */ Capabilities createCapabilities() { return new ElasticCapabilities(); } /** * Describes the capabilities of this encoder. * *
    * <p>
    * Performs lazy creation of capabilities. *
    * </p>
    * * If you're extending this class, override {@link #createCapabilities()} to declare which capabilities you * support. Don't use this method. * * @return The capabilities supported by this encoder. */ private synchronized Capabilities getCapabilities() { if (capabilities == null) { capabilities = createCapabilities(); } return capabilities; //maybe clone? Make immutable somehow } // BEGIN IMPLEMENTING org.opengis.filter.FilterVisitor METHODS /** * Writes the FilterBuilder for the ExcludeFilter. * * @param filter the filter to be visited */ public Object visit(ExcludeFilter filter, Object extraData) { queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must_not", MATCH_ALL)); return extraData; } /** * Writes the FilterBuilder for the IncludeFilter. * * @param filter the filter to be visited * */ public Object visit(IncludeFilter filter, Object extraData) { queryBuilder = MATCH_ALL; return extraData; } /** * Writes the FilterBuilder for the PropertyIsBetween Filter. * * @param filter the Filter to be visited. * */ public Object visit(PropertyIsBetween filter, Object extraData) { LOGGER.finest("exporting PropertyIsBetween"); Expression expr = filter.getExpression(); Expression lowerbounds = filter.getLowerBoundary(); Expression upperbounds = filter.getUpperBoundary(); Class context; nested = false; AttributeDescriptor attType = (AttributeDescriptor)expr.evaluate(featureType); if (attType != null) { context = attType.getType().getBinding(); if (attType.getUserData().containsKey(NESTED)) { nested = (Boolean) attType.getUserData().get(NESTED); } if (Date.class.isAssignableFrom(context)) { updateDateFormatter(attType); } } else { //assume it's a string? 
context = String.class; } expr.accept(this, extraData); key = (String) field; lowerbounds.accept(this, context); lower = field; upperbounds.accept(this, context); Object upper = field; if(nested) { path = extractNestedPath(key); } queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("gte", lower, "lte", upper))); if(nested) { queryBuilder = ImmutableMap.of("nested", ImmutableMap.of("path", path, "query", queryBuilder)); } return extraData; } /** * Writes the FilterBuilder for the Like Filter. * * @param filter the filter to be visited * */ public Object visit(PropertyIsLike filter, Object extraData) { char esc = filter.getEscape().charAt(0); char multi = filter.getWildCard().charAt(0); char single = filter.getSingleChar().charAt(0); if (filter.isMatchingCase()) { LOGGER.fine("Case sensitive search not supported"); } String literal = filter.getLiteral(); Expression att = filter.getExpression(); AttributeDescriptor attType = (AttributeDescriptor) att.evaluate(featureType); Boolean analyzed = false; nested = false; if (attType != null) { if (attType.getUserData().containsKey(ANALYZED)) { analyzed = (Boolean) attType.getUserData().get(ANALYZED); } if (attType.getUserData().containsKey(NESTED)) { nested = (Boolean) attType.getUserData().get(NESTED); } if (Date.class.isAssignableFrom(attType.getType().getBinding())) { updateDateFormatter(attType); } } att.accept(this, extraData); key = (String) field; String pattern; if (analyzed) { // use query string query post filter for analyzed fields pattern = convertToQueryString(esc, multi, single, literal); } else { // default to regexp filter pattern = convertToRegex(esc, multi, single, literal); } if (nested) { path = extractNestedPath(key); } if (analyzed) { // use query string query for analyzed fields queryBuilder = ImmutableMap.of("query_string", ImmutableMap.of("query", pattern, "default_field", key)); } else { // default to regexp query queryBuilder = ImmutableMap.of("regexp", ImmutableMap.of(key, 
pattern)); } if (nested) { queryBuilder = ImmutableMap.of("nested", ImmutableMap.of("path", path, "query", queryBuilder)); } return extraData; } /** * Write the FilterBuilder for an And filter * * @param filter the filter to visit * @param extraData extra data (unused by this method) * */ public Object visit(And filter, Object extraData) { return visit((BinaryLogicOperator)filter, "AND"); } /** * Write the FilterBuilder for a Not filter * * @param filter the filter to visit * @param extraData extra data (unused by this method) * */ public Object visit(Not filter, Object extraData) { if(filter.getFilter() instanceof PropertyIsNull) { Expression expr = ((PropertyIsNull) filter.getFilter()).getExpression(); expr.accept(this, extraData); } else { filter.getFilter().accept(this, extraData); } if(filter.getFilter() instanceof PropertyIsNull) { queryBuilder = ImmutableMap.of("exists", ImmutableMap.of("field", field)); } else { queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must_not", queryBuilder)); } return extraData; } /** * Write the FilterBuilder for an Or filter * * @param filter the filter to visit * @param extraData extra data (unused by this method) * */ public Object visit(Or filter, Object extraData) { return visit((BinaryLogicOperator)filter, "OR"); } /** * Common implementation for BinaryLogicOperator filters. This way * they're all handled centrally. * * @param filter the logic statement. * @param extraData extra filter data. Not modified directly by this method. 
*/ private Object visit(BinaryLogicOperator filter, Object extraData) { LOGGER.finest("exporting LogicFilter"); final List> filters = new ArrayList<>(); for (final Filter child : filter.getChildren()) { child.accept(this, extraData); filters.add(queryBuilder); } if (extraData.equals("AND")) { queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must", filters)); } else if (extraData.equals("OR")) { queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("should", filters)); } return extraData; } /** * Write the FilterBuilder for this kind of filter * * @param filter the filter to visit * @param extraData extra data (unused by this method) * */ public Object visit(PropertyIsEqualTo filter, Object extraData) { visitBinaryComparisonOperator(filter, "="); return extraData; } /** * Write the FilterBuilder for this kind of filter * * @param filter the filter to visit * @param extraData extra data (unused by this method) * */ public Object visit(PropertyIsGreaterThanOrEqualTo filter, Object extraData) { visitBinaryComparisonOperator(filter, ">="); return extraData; } /** * Write the FilterBuilder for this kind of filter * * @param filter the filter to visit * @param extraData extra data (unused by this method) * */ public Object visit(PropertyIsGreaterThan filter, Object extraData) { visitBinaryComparisonOperator(filter, ">"); return extraData; } /** * Write the FilterBuilder for this kind of filter * * @param filter the filter to visit * @param extraData extra data (unused by this method) * */ public Object visit(PropertyIsLessThan filter, Object extraData) { visitBinaryComparisonOperator(filter, "<"); return extraData; } /** * Write the FilterBuilder for this kind of filter * * @param filter the filter to visit * @param extraData extra data (unused by this method) * */ public Object visit(PropertyIsLessThanOrEqualTo filter, Object extraData) { visitBinaryComparisonOperator(filter, "<="); return extraData; } /** * Write the FilterBuilder for this kind of filter * * 
@param filter the filter to visit * @param extraData extra data (unused by this method) * */ public Object visit(PropertyIsNotEqualTo filter, Object extraData) { visitBinaryComparisonOperator(filter, "!="); return extraData; } /** * Common implementation for BinaryComparisonOperator filters. * * @param filter the comparison. * */ private void visitBinaryComparisonOperator(BinaryComparisonOperator filter, Object extraData) { LOGGER.finest("exporting FilterBuilder ComparisonFilter"); Expression left = filter.getExpression1(); Expression right = filter.getExpression2(); if (isBinaryExpression(left) || isBinaryExpression(right)) { throw new UnsupportedOperationException("Binary expressions not supported"); } AttributeDescriptor attType = null; Class leftContext = null, rightContext = null; if (left instanceof PropertyName) { // It's a propertyname, we should get the class and pass it in // as context to the tree walker. attType = (AttributeDescriptor)left.evaluate(featureType); if (attType != null) { rightContext = attType.getType().getBinding(); } } if (right instanceof PropertyName) { attType = (AttributeDescriptor)right.evaluate(featureType); if (attType != null) { leftContext = attType.getType().getBinding(); } } nested = false; if (attType != null) { if (attType.getUserData().containsKey(NESTED)) { nested = (Boolean) attType.getUserData().get(NESTED); } if (Date.class.isAssignableFrom(attType.getType().getBinding())) { updateDateFormatter(attType); } } //case sensitivity if ( !filter.isMatchingCase() ) { //we only do for = and != if ( filter instanceof PropertyIsEqualTo || filter instanceof PropertyIsNotEqualTo ) { //and only for strings if ( String.class.equals( leftContext ) || String.class.equals( rightContext ) ) { //matchCase = false; LOGGER.fine("Case insensitive filter not supported"); } } } String type = (String) extraData; if (left instanceof PropertyName) { left.accept(this, null); key = (String) field; right.accept(this, rightContext); } else { 
right.accept(this, null); key = (String) field; left.accept(this, leftContext); } if (nested) { path = extractNestedPath(key); } switch (type) { case "=": queryBuilder = ImmutableMap.of("term", ImmutableMap.of(key, field)); break; case "!=": queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must_not", ImmutableMap.of("term", ImmutableMap.of(key, field)))); break; case ">": queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("gt", field))); break; case ">=": queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("gte", field))); break; case "<": queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("lt", field))); break; case "<=": queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("lte", field))); break; } if (nested) { queryBuilder = ImmutableMap.of("nested", ImmutableMap.of("path", path, "query", queryBuilder)); } } /* * determines if the function is a binary expression */ private boolean isBinaryExpression(Expression e) { return e instanceof BinaryExpression; } /** * Writes the FilterBuilder for the Null Filter. * * @param filter the null filter. 
* */ public Object visit(PropertyIsNull filter, Object extraData) { LOGGER.finest("exporting NullFilter"); Expression expr = filter.getExpression(); expr.accept(this, extraData); queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must_not", ImmutableMap.of("exists", ImmutableMap.of("field", field)))); return extraData; } public Object visit(PropertyIsNil filter, Object extraData) { throw new UnsupportedOperationException("isNil not supported"); } /** * Encodes an Id filter * * @param filter the * */ public Object visit(Id filter, Object extraData) { final List idList = new ArrayList<>(); for (final Identifier id : filter.getIdentifiers()) { idList.add(id.toString()); } queryBuilder = ImmutableMap.of("ids", ImmutableMap.of("values", idList)); return extraData; } public Object visit(BBOX filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(Beyond filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(Contains filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(Crosses filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(Disjoint filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(DWithin filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(Equals filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(Intersects filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(Overlaps filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(Touches filter, Object extraData) { return visitBinarySpatialOperator(filter, extraData); } public Object visit(Within filter, Object extraData) { return visitBinarySpatialOperator(filter, 
extraData); } private Object visitBinarySpatialOperator(BinarySpatialOperator filter, Object extraData) { // basic checks if (filter == null) throw new NullPointerException( "Filter to be encoded cannot be null"); // extract the property name and the geometry literal Expression e1 = filter.getExpression1(); Expression e2 = filter.getExpression2(); if (e1 instanceof Literal && e2 instanceof PropertyName) { e1 = filter.getExpression2(); e2 = filter.getExpression1(); } if (e1 instanceof PropertyName && e2 instanceof Literal) { //call the "regular" method return visitBinarySpatialOperator(filter, (PropertyName)e1, (Literal)e2, filter .getExpression1() instanceof Literal, extraData); } else { //call the join version return visitBinarySpatialOperator(filter, e1, e2, extraData); } } private Object visitBinaryTemporalOperator(BinaryTemporalOperator filter, Object extraData) { if (filter == null) { throw new NullPointerException("Null filter"); } Expression e1 = filter.getExpression1(); Expression e2 = filter.getExpression2(); if (e1 instanceof Literal && e2 instanceof PropertyName) { e1 = filter.getExpression2(); e2 = filter.getExpression1(); } if (e1 instanceof PropertyName && e2 instanceof Literal) { //call the "regular" method return visitBinaryTemporalOperator(filter, (PropertyName)e1, (Literal)e2, filter.getExpression1() instanceof Literal, extraData); } else { //call the join version return visitBinaryTemporalOperator(); } } /** * Handles the common case of a PropertyName,Literal geometry binary temporal operator. *
    * <p>
    * Subclasses should override if they support more temporal operators than what is handled in * this base class. *
    * </p>
    */ private Object visitBinaryTemporalOperator(BinaryTemporalOperator filter, PropertyName property, Literal temporal, boolean swapped, Object extraData) { AttributeDescriptor attType = (AttributeDescriptor)property.evaluate(featureType); Class typeContext = null; nested = false; if (attType != null) { typeContext = attType.getType().getBinding(); if (attType.getUserData().containsKey(NESTED)) { nested = (Boolean) attType.getUserData().get(NESTED); } updateDateFormatter(attType); } //check for time period Period period = null; if (temporal.evaluate(null) instanceof Period) { period = (Period) temporal.evaluate(null); } //verify that those filters that require a time period have one if ((filter instanceof Begins || filter instanceof BegunBy || filter instanceof Ends || filter instanceof EndedBy || filter instanceof During || filter instanceof TContains) && period == null) { throw new IllegalArgumentException("Filter requires a time period"); } if (filter instanceof TEquals && period != null) { throw new IllegalArgumentException("TEquals filter does not accept time period"); } //ensure the time period is the correct argument if ((filter instanceof Begins || filter instanceof Ends || filter instanceof During) && swapped) { throw new IllegalArgumentException("Time period must be second argument of Filter"); } if ((filter instanceof BegunBy || filter instanceof EndedBy || filter instanceof TContains) && !swapped) { throw new IllegalArgumentException("Time period must be first argument of Filter"); } key = ""; if (filter instanceof After || filter instanceof Before) { op = filter instanceof After ? 
" > " : " < "; if (period != null) { property.accept(this, extraData); key = (String) field; visitBegin(period, extraData); begin = field; visitEnd(period, extraData); end = field; } else { property.accept(this, extraData); key = (String) field; temporal.accept(this, typeContext); } } else if (filter instanceof Begins || filter instanceof Ends || filter instanceof BegunBy || filter instanceof EndedBy ) { property.accept(this, extraData); key = (String) field; if (filter instanceof Begins || filter instanceof BegunBy) { visitBegin(period, extraData); } else { visitEnd(period, extraData); } } else if (filter instanceof During || filter instanceof TContains){ property.accept(this, extraData); key = (String) field; visitBegin(period, extraData); lower = field; visitEnd(period, extraData); } else if (filter instanceof TEquals) { property.accept(this, extraData); key = (String) field; temporal.accept(this, typeContext); } if (nested) { path = extractNestedPath(key); } if (filter instanceof After || filter instanceof Before) { if (period != null) { if ((op.equals(" > ") && !swapped) || (op.equals(" < ") && swapped)) { queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("gt", end))); } else { queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("lt", begin))); } } else { if (op.equals(" < ") || swapped) { queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("lt", field))); } else { queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("gt", field))); } } } else if (filter instanceof Begins || filter instanceof Ends || filter instanceof BegunBy || filter instanceof EndedBy ) { queryBuilder = ImmutableMap.of("term", ImmutableMap.of(key, field)); } else if (filter instanceof During || filter instanceof TContains){ queryBuilder = ImmutableMap.of("range", ImmutableMap.of(key, ImmutableMap.of("gt", lower, "lt", field))); } else if (filter instanceof TEquals) { queryBuilder = 
ImmutableMap.of("term", ImmutableMap.of(key, field)); } if (nested) { queryBuilder = ImmutableMap.of("nested", ImmutableMap.of("path", path, "query", queryBuilder)); } return extraData; } private void visitBegin(Period p, Object extraData) { filterFactory.literal(p.getBeginning().getPosition().getDate()).accept(this, extraData); } private void visitEnd(Period p, Object extraData) { filterFactory.literal(p.getEnding().getPosition().getDate()).accept(this, extraData); } /** * Handles the general case of two expressions in a binary temporal filter. *
    * <p>
    * Subclasses should override if they support more temporal operators than what is handled in * this base class. *
    * </p>
    */ Object visitBinaryTemporalOperator() { throw new UnsupportedOperationException("Join version of binary temporal operator not supported"); } /** * Encodes a null filter value. The current implementation * does exactly nothing. * @param extraData extra data to be used to evaluate the filter * @return the untouched extraData parameter */ public Object visitNullFilter(Object extraData) { return extraData; } // END IMPLEMENTING org.opengis.filter.FilterVisitor METHODS // START IMPLEMENTING org.opengis.filter.ExpressionVisitor METHODS /** * Writes the FilterBuilder for the attribute Expression. * * @param expression the attribute. * */ @Override public Object visit(PropertyName expression, Object extraData) { LOGGER.finest("exporting PropertyName"); SimpleFeatureType featureType = this.featureType; Class target = null; if(extraData instanceof Class) { target = (Class) extraData; } //first evaluate expression against feature type get the attribute, AttributeDescriptor attType = (AttributeDescriptor) expression.evaluate(featureType); String encodedField; if ( attType != null ) { Map userData = attType.getUserData(); if( userData != null && userData.containsKey("full_name") ) { encodedField = userData.get("full_name").toString(); } else { encodedField = attType.getLocalName(); } if(target != null && target.isAssignableFrom(attType.getType().getBinding())) { // no need for casting, it's already the right type target = null; } } else { // fall back to just encoding the property name encodedField = expression.getPropertyName(); } if (target != null) { LOGGER.fine("PropertyName type casting not implemented"); } field = encodedField; return extraData; } /** * Export the contents of a Literal Expresion * * @param expression * the Literal to export * * @throws FilterToElasticException If there were io problems. 
*/ @Override public Object visit(Literal expression, Object context) throws FilterToElasticException { LOGGER.finest("exporting LiteralExpression"); // type to convert the literal to Class target = null; if ( context instanceof Class ) { target = (Class) context; } try { //evaluate the expression Object literal = evaluateLiteral( expression, target ); // handle geometry case if (literal instanceof Geometry) { // call this method for backwards compatibility with subclasses visitLiteralGeometry(filterFactory.literal(literal)); } else { // write out the literal allowing subclasses to override this // behaviour (for writing out dates and the like using the BDMS custom functions) writeLiteral(literal); } } catch (IOException e) { throw new FilterToElasticException("IO problems writing literal", e); } return context; } private Object evaluateLiteral(Literal expression, Class target) { Object literal = null; // HACK: let expression figure out the right value for numbers, // since the context is almost always improperly set and the // numeric converters try to force floating points to integrals // JD: the above is no longer true, so instead do a safe conversion if(target != null) { // use the target type if (Number.class.isAssignableFrom(target)) { literal = safeConvertToNumber(expression, target); if (literal == null) { literal = safeConvertToNumber(expression, Number.class); } } else { literal = expression.evaluate(null, target); } } //check for conversion to number if (target == null) { // we don't know the target type, check for a conversion to a number Number number = safeConvertToNumber(expression, Number.class); if (number != null) { literal = number; } } // if the target was not known, of the conversion failed, try the // type guessing dance literal expression does only for the following // method call if(literal == null) literal = expression.evaluate(null); // if that failed as well, grab the value as is if(literal == null) literal = expression.getValue(); return 
literal; } /** * Writes out a non null, non geometry literal. The base class properly handles * null, numeric and booleans (true|false), and turns everything else into a string. * Subclasses are expected to override this shall they need a different treatment * (e.g. for dates) * @param literal Literal */ private void writeLiteral(Object literal) { field = literal; if (Date.class.isAssignableFrom(literal.getClass())) { field = dateFormatter.print(((Date) literal).getTime()); } } void visitLiteralTimePeriod() { throw new UnsupportedOperationException("Time periods not supported, subclasses must implement this " + "method to support encoding timeperiods"); } public Object visit(Add expression, Object extraData) { throw new UnsupportedOperationException("Add expressions not supported"); } public Object visit(Divide expression, Object extraData) { throw new UnsupportedOperationException("Divide expressions not supported"); } public Object visit(Multiply expression, Object extraData) { throw new UnsupportedOperationException("Multiply expressions not supported"); } public Object visit(Subtract expression, Object extraData) { throw new UnsupportedOperationException("Subtract expressions not supported"); } public Object visit(NilExpression expression, Object extraData) { field = null; return extraData; } //temporal filters, not supported public Object visit(After after, Object extraData) { return visitBinaryTemporalOperator(after, extraData); } public Object visit(AnyInteracts anyInteracts, Object extraData) { return visitBinaryTemporalOperator(anyInteracts, extraData); } public Object visit(Before before, Object extraData) { return visitBinaryTemporalOperator(before, extraData); } public Object visit(Begins begins, Object extraData) { return visitBinaryTemporalOperator(begins, extraData); } public Object visit(BegunBy begunBy, Object extraData) { return visitBinaryTemporalOperator(begunBy, extraData); } public Object visit(During during, Object extraData) { return 
visitBinaryTemporalOperator(during, extraData); } public Object visit(EndedBy endedBy, Object extraData) { return visitBinaryTemporalOperator(endedBy, extraData); } public Object visit(Ends ends, Object extraData) { return visitBinaryTemporalOperator(ends, extraData); } public Object visit(Meets meets, Object extraData) { return visitBinaryTemporalOperator(meets, extraData); } public Object visit(MetBy metBy, Object extraData) { return visitBinaryTemporalOperator(metBy, extraData); } public Object visit(OverlappedBy overlappedBy, Object extraData) { return visitBinaryTemporalOperator(overlappedBy, extraData); } public Object visit(TContains contains, Object extraData) { return visitBinaryTemporalOperator(contains, extraData); } public Object visit(TEquals equals, Object extraData) { return visitBinaryTemporalOperator(equals, extraData); } public Object visit(TOverlaps contains, Object extraData) { return visitBinaryTemporalOperator(contains, extraData); } private void visitLiteralGeometry(Literal expression) throws IOException { // evaluate the literal and store it for later currentGeometry = (Geometry) evaluateLiteral(expression, Geometry.class); if ( currentGeometry instanceof LinearRing ) { // convert LinearRing to LineString final GeometryFactory factory = currentGeometry.getFactory(); final LinearRing linearRing = (LinearRing) currentGeometry; final CoordinateSequence coordinates; coordinates = linearRing.getCoordinateSequence(); currentGeometry = factory.createLineString(coordinates); } final String geoJson = new GeometryJSON().toString(currentGeometry); currentShapeBuilder = mapReader.readValue(geoJson); } private Object visitBinarySpatialOperator(BinarySpatialOperator filter, PropertyName property, Literal geometry, boolean swapped, Object extraData) { return helper.visitBinarySpatialOperator(filter, property, geometry, swapped, extraData); } private Object visitBinarySpatialOperator(BinarySpatialOperator filter, Expression e1, Expression e2, Object 
extraData) { return helper.visitBinarySpatialOperator(filter, e1, e2, extraData); } @Override public Object visit(Function function, Object extraData) { throw new UnsupportedOperationException("Function support not implemented"); } // END IMPLEMENTING org.opengis.filter.ExpressionVisitor METHODS private void updateDateFormatter(AttributeDescriptor attType) { dateFormatter = DEFAULT_DATE_FORMATTER; if (attType != null) { final String format = (String) attType.getUserData().get(DATE_FORMAT); if (format != null) { dateFormatter = Joda.forPattern(format).printer(); } } } /* * helper to do a safe convesion of expression to a number */ private Number safeConvertToNumber(Expression expression, Class target) { return (Number) Converters.convert(expression.evaluate(null), target, new Hints(ConverterFactory.SAFE_CONVERSION, true)); } void addViewParams(Query query) { if (query.getHints() != null && query.getHints().get(Hints.VIRTUAL_TABLE_PARAMETERS) != null) { @SuppressWarnings("unchecked") Map parameters = (Map) query.getHints().get(Hints.VIRTUAL_TABLE_PARAMETERS); boolean nativeOnly = false; for (final Map.Entry entry : parameters.entrySet()) { if (entry.getKey().equalsIgnoreCase("native-only")) { nativeOnly = Boolean.parseBoolean(entry.getValue()); } } if (nativeOnly) { LOGGER.fine("Ignoring GeoServer filter (Elasticsearch native query/post filter only)"); queryBuilder = MATCH_ALL; } for (final Map.Entry entry : parameters.entrySet()) { if (entry.getKey().equalsIgnoreCase("q")) { final String value = entry.getValue(); try { nativeQueryBuilder = mapReader.readValue(value); } catch (Exception e) { // retry with decoded value try { nativeQueryBuilder = mapReader.readValue(ElasticParserUtil.urlDecode(value)); } catch (Exception e2) { throw new FilterToElasticException("Unable to parse native query", e); } } } if (entry.getKey().equalsIgnoreCase("a")) { final ObjectMapper mapper = new ObjectMapper(); final TypeReference>>> type; type = new TypeReference>>>() {}; final String 
value = entry.getValue(); try { this.aggregations = mapper.readValue(value, type); } catch (Exception e) { try { this.aggregations = mapper.readValue(ElasticParserUtil.urlDecode(value), type); } catch (Exception e2) { throw new FilterToElasticException("Unable to parse aggregation", e); } } } } } } public static String convertToQueryString(char escape, char multi, char single, String pattern) { StringBuilder result = new StringBuilder(pattern.length()+5); for (int i = 0; i < pattern.length(); i++) { char chr = pattern.charAt(i); if (chr == escape) { // emit the next char and skip it if (i!= (pattern.length()-1) ) { result.append("\\"); result.append( pattern.charAt(i+1) ); } i++; // skip next char } else if (chr == single) { result.append('?'); } else if (chr == multi) { result.append('*'); } else { result.append(chr); } } return result.toString(); } public static String convertToRegex(char escape, char multi, char single, String pattern) { StringBuilder result = new StringBuilder(pattern.length()+5); for (int i = 0; i < pattern.length(); i++) { char chr = pattern.charAt(i); if (chr == escape) { // emit the next char and skip it if (i!= (pattern.length()-1) ) { result.append("\\"); result.append( pattern.charAt(i+1) ); } i++; // skip next char } else if (chr == single) { result.append('.'); } else if (chr == multi) { result.append(".*"); } else { result.append(chr); } } return result.toString(); } private static String extractNestedPath(String field) { final String[] parts = field.split("\\."); final String base = parts[parts.length-1]; return field.replace("." 
+ base, ""); } public Boolean getFullySupported() { return fullySupported; } public Map getNativeQueryBuilder() { return nativeQueryBuilder; } public Map getQueryBuilder() { final Map queryBuilder; if (nativeQueryBuilder.equals(MATCH_ALL)) { queryBuilder = this.queryBuilder; } else if (this.queryBuilder.equals(MATCH_ALL)) { queryBuilder = nativeQueryBuilder; } else { queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must", ImmutableList.of(nativeQueryBuilder, this.queryBuilder))); } return queryBuilder; } public Map>> getAggregations() { return aggregations; } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/FilterToElasticException.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; class FilterToElasticException extends RuntimeException { private static final long serialVersionUID = 1819999351118120451L; public FilterToElasticException(String msg, Throwable exp) { super(msg, exp); } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/FilterToElasticHelper.java ================================================ /* * GeoTools - The Open Source Java GIS Toolkit * http://geotools.org * * (C) 2002-2009, Open Source Geospatial Foundation (OSGeo) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
*/ package mil.nga.giat.data.elasticsearch; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import static mil.nga.giat.data.elasticsearch.ElasticConstants.GEOMETRY_TYPE; import static mil.nga.giat.data.elasticsearch.ElasticConstants.MATCH_ALL; import mil.nga.giat.data.elasticsearch.ElasticAttribute.ElasticGeometryType; import org.geotools.factory.CommonFactoryFinder; import org.geotools.geometry.jts.JTS; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.CoordinateFilter; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryComponentFilter; import org.locationtech.jts.geom.Polygon; import org.locationtech.spatial4j.shape.SpatialRelation; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.filter.expression.Expression; import org.opengis.filter.expression.Literal; import org.opengis.filter.expression.PropertyName; import org.opengis.filter.spatial.BBOX; import org.opengis.filter.spatial.Beyond; import org.opengis.filter.spatial.BinarySpatialOperator; import org.opengis.filter.spatial.Contains; import org.opengis.filter.spatial.DWithin; import org.opengis.filter.spatial.Disjoint; import org.opengis.filter.spatial.DistanceBufferOperator; import org.opengis.filter.spatial.Intersects; import org.opengis.filter.spatial.Within; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.opengis.geometry.BoundingBox; class FilterToElasticHelper { private String key; private Map shapeBuilder; /** * Conversion factor from common units to meter */ static final Map UNITS_MAP = new HashMap() { private static final long serialVersionUID = 1L; { // Metric put("millimeter", 0.001); put("mm", 0.001); put("cm", 0.01); put("m", 1.0); put("kilometers", 1000.0); put("kilometer", 1000.0); put("km", 1000.0); // Other put("in", 0.0254); put("ft", 0.3048); put("feet", 
0.3048); put("yd", 0.9144); put("mi", 1609.344); put("miles", 1609.344); put("NM", 1852d); put("nmi", 1852d); } }; private static final Envelope WORLD = new Envelope(-180, 180, -90, 90); private final FilterToElastic delegate; public FilterToElasticHelper(FilterToElastic delegate) { this.delegate = delegate; } Object visitBinarySpatialOperator(BinarySpatialOperator filter, PropertyName property, Literal geometry, boolean swapped, Object extraData) { if (filter instanceof DistanceBufferOperator) { visitDistanceSpatialOperator((DistanceBufferOperator) filter, property, geometry, swapped, extraData); } else { visitComparisonSpatialOperator(filter, property, geometry, swapped, extraData); } return extraData; } Object visitBinarySpatialOperator(BinarySpatialOperator filter, Expression e1, Expression e2, Object extraData) { visitBinarySpatialOperator(filter, e1, e2, false, extraData); return extraData; } private void visitDistanceSpatialOperator(DistanceBufferOperator filter, PropertyName property, Literal geometry, boolean swapped, Object extraData) { property.accept(delegate, extraData); key = (String) delegate.field; geometry.accept(delegate, extraData); final Geometry geo = delegate.currentGeometry; double lat = geo.getCentroid().getY(); double lon = geo.getCentroid().getX(); final double inputDistance = filter.getDistance(); final String inputUnits = filter.getDistanceUnits(); double distance = Double.valueOf(toMeters(inputDistance, inputUnits)); delegate.queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL, "filter", ImmutableMap.of("geo_distance", ImmutableMap.of("distance", distance +"m", key, ImmutableList.of(lon, lat))))); if ((filter instanceof DWithin && swapped) || (filter instanceof Beyond && !swapped)) { delegate.queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must_not", delegate.queryBuilder)); } } private String toMeters(double distance, String unit) { // only geography uses metric units Double conversion = 
UNITS_MAP.get(unit); if(conversion != null) { return String.valueOf(distance * conversion); } // in case unknown unit or not geography, use as-is return String.valueOf(distance); } private void visitComparisonSpatialOperator(BinarySpatialOperator filter, PropertyName property, Literal geometry, boolean swapped, Object extraData) { // if geography case, sanitize geometry first Literal geometry1 = clipToWorld(geometry); //noinspection RedundantCast visitBinarySpatialOperator(filter, (Expression)property, (Expression) geometry1, swapped, extraData); // if geography case, sanitize geometry first if(isWorld(geometry1)) { // nothing to filter in this case delegate.queryBuilder = MATCH_ALL; return; } else if(isEmpty(geometry1)) { if(!(filter instanceof Disjoint)) { delegate.queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must_not", MATCH_ALL)); } else { delegate.queryBuilder = MATCH_ALL; } return; } //noinspection RedundantCast visitBinarySpatialOperator(filter, (Expression)property, (Expression) geometry1, swapped, extraData); } private void visitBinarySpatialOperator(BinarySpatialOperator filter, Expression e1, Expression e2, boolean swapped, Object extraData) { AttributeDescriptor attType; attType = (AttributeDescriptor)e1.evaluate(delegate.featureType); ElasticGeometryType geometryType; geometryType = (ElasticGeometryType) attType.getUserData().get(GEOMETRY_TYPE); if (geometryType == ElasticGeometryType.GEO_POINT) { visitGeoPointBinarySpatialOperator(filter, e1, e2, swapped, extraData); } else { visitGeoShapeBinarySpatialOperator(filter, e1, e2, swapped, extraData); } } private void visitGeoShapeBinarySpatialOperator(BinarySpatialOperator filter, Expression e1, Expression e2, boolean swapped, Object extraData) { SpatialRelation shapeRelation; if (filter instanceof Disjoint) { shapeRelation = SpatialRelation.DISJOINT; } else if ((!swapped && filter instanceof Within) || (swapped && filter instanceof Contains)) { shapeRelation = SpatialRelation.WITHIN; } else if 
(filter instanceof Intersects || filter instanceof BBOX) { shapeRelation = SpatialRelation.INTERSECTS; } else { FilterToElastic.LOGGER.fine(filter.getClass().getSimpleName() + " is unsupported for geo_shape types"); shapeRelation = null; delegate.fullySupported = false; } if (shapeRelation != null) { e1.accept(delegate, extraData); key = (String) delegate.field; e2.accept(delegate, extraData); shapeBuilder = delegate.currentShapeBuilder; } if (shapeRelation != null && shapeBuilder != null) { delegate.queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL, "filter", ImmutableMap.of("geo_shape", ImmutableMap.of(key, ImmutableMap.of("shape", shapeBuilder, "relation", shapeRelation))))); } else { delegate.queryBuilder = MATCH_ALL; } } private void visitGeoPointBinarySpatialOperator(BinarySpatialOperator filter, Expression e1, Expression e2, boolean swapped, Object extraData) { e1.accept(delegate, extraData); key = (String) delegate.field; e2.accept(delegate, extraData); final Geometry geometry = delegate.currentGeometry; if (geometry instanceof Polygon && ((!swapped && filter instanceof Within) || (swapped && filter instanceof Contains) || filter instanceof Intersects)) { final Polygon polygon = (Polygon) geometry; final List> points = new ArrayList<>(); for (final Coordinate coordinate : polygon.getCoordinates()) { points.add(ImmutableList.of(coordinate.x, coordinate.y)); } delegate.queryBuilder = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL, "filter", ImmutableMap.of("geo_polygon", ImmutableMap.of(key, ImmutableMap.of("points", points))))); } else if (filter instanceof BBOX) { final BoundingBox envelope = ((BBOX) filter).getBounds(); final double minY = clipLat(envelope.getMinY()); final double maxY = clipLat(envelope.getMaxY()); final double minX, maxX; if (envelope.getWidth() < 360) { minX = clipLon(envelope.getMinX()); maxX = clipLon(envelope.getMaxX()); } else { minX = -180; maxX = 180; } delegate.queryBuilder = 
ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL, "filter", ImmutableMap.of("geo_bounding_box", ImmutableMap.of(key, ImmutableMap.of("top_left", ImmutableList.of(minX, maxY), "bottom_right", ImmutableList.of(maxX, minY)))))); } else { FilterToElastic.LOGGER.fine(filter.getClass().getSimpleName() + " is unsupported for geo_point types"); delegate.fullySupported = false; delegate.queryBuilder = MATCH_ALL; } } private Literal clipToWorld(Literal geometry) { if(geometry != null) { Geometry g = geometry.evaluate(null, Geometry.class); if(g != null) { g.apply((GeometryComponentFilter) geom -> geom.apply((CoordinateFilter) coord -> { coord.setCoordinate(new Coordinate(clipLon(coord.x), clipLat(coord.y))); })); geometry = CommonFactoryFinder.getFilterFactory(null).literal(g); } } return geometry; } private double clipLon(double lon) { double x = Math.signum(lon)*(Math.abs(lon)%360); return x>180 ? x-360 : (x<-180 ? x+360 : x); } private double clipLat(double lat) { return Math.min(90, Math.max(-90, lat)); } /** * Returns true if the geometry covers the entire world * @param geometry Geometry * @return Flag indicating geometry is the world */ private boolean isWorld(Literal geometry) { boolean result = false; if(geometry != null) { Geometry g = geometry.evaluate(null, Geometry.class); if(g != null) { result = JTS.toGeometry(WORLD).equalsTopo(g.union()); } } return result; } /** * Returns true if the geometry is fully empty * @param geometry Geometry * @return Flag indicating whether geometry is empty */ private boolean isEmpty(Literal geometry) { boolean result = false; if(geometry != null) { Geometry g = geometry.evaluate(null, Geometry.class); result = g == null || g.isEmpty(); } return result; } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/GeohashUtil.java ================================================ /* * This file is hereby placed into the Public Domain. 
This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; import java.util.Map; import org.locationtech.jts.geom.Envelope; import com.github.davidmoten.geo.GeoHash; class GeohashUtil { public static int computePrecision(Envelope envelope, long size, double threshold) { return computePrecision(envelope, size, threshold, 1); } private static int computePrecision(Envelope envelope, long size, double threshold, int n) { return computeSize(envelope, n)/size > threshold ? n : computePrecision(envelope, size, threshold, n+1); } private static double computeSize(Envelope envelope, int n) { final double area = Math.min(360*180, envelope.getArea()); return area/(GeoHash.widthDegrees(n)*GeoHash.heightDegrees(n)); } public static void updateGridAggregationPrecision(Map>> aggregations, int precision) { aggregations.values().stream().filter(a -> a.containsKey("geohash_grid")).forEach(a -> { Map geohashGrid = a.get("geohash_grid"); if (!geohashGrid.containsKey("precision")) { geohashGrid.put("precision", precision); } }); } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/RestElasticClient.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.data.elasticsearch; import java.io.IOException; import java.io.InputStream; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import mil.nga.giat.data.elasticsearch.ElasticMappings.Mapping; import org.apache.http.HttpEntity; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.geotools.util.logging.Logging; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContext; import org.springframework.security.core.context.SecurityContextHolder; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; public class RestElasticClient implements ElasticClient { final static double DEFAULT_VERSION = 7.0; private final static Logger LOGGER = Logging.getLogger(RestElasticClient.class); private final static DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); private final RestClient client; private final RestClient proxyClient; private final boolean enableRunAs; private final ObjectMapper mapper; private Double version; public RestElasticClient(RestClient client) { this(client, null, false); } public RestElasticClient(RestClient client, RestClient proxyClient, boolean enableRunAs) { this.client = client; this.proxyClient = proxyClient; this.mapper = new 
ObjectMapper(); this.mapper.setDateFormat(DATE_FORMAT); this.enableRunAs = enableRunAs; } @Override public double getVersion() { if (version != null) { return version; } final Pattern pattern = Pattern.compile("(\\d+\\.\\d+)\\.\\d+"); try { final Response response = performRequest("GET", "/", null, true); try (final InputStream inputStream = response.getEntity().getContent()) { Map info = mapper.readValue(inputStream, new TypeReference>() {}); @SuppressWarnings("unchecked") Map ver = (Map) info.getOrDefault("version", Collections.EMPTY_MAP); final Matcher m = pattern.matcher((String) ver.get("number")); if (!m.find()) { version = DEFAULT_VERSION; } else { version = Double.valueOf(m.group(1)); } } } catch (Exception e) { LOGGER.warning("Error getting server version: " + e); version = DEFAULT_VERSION; } return version; } @Override public List getTypes(String indexName) throws IOException { return new ArrayList<>(getMappings(indexName, null).keySet()); } @Override public Map getMapping(String indexName, String type) throws IOException { final Map mappings = getMappings(indexName, type); final Map properties; if (getVersion() < 7 && mappings.containsKey(type)) { properties = mappings.get(type).getProperties(); } else if (getVersion() >= 7) { final Mapping mapping = mappings.values().stream().findFirst().orElse(null); properties = mapping != null ? 
mapping.getProperties() : null; } else { properties = null; } return properties; } private Map getMappings(String indexName, String type) throws IOException { final Response response; try { final StringBuilder path = new StringBuilder("/").append(indexName).append("/_mapping"); if (type != null && getVersion() < 7) { path.append("/").append(type); } response = performRequest("GET", path.toString(), null, true); } catch (ResponseException e) { if (e.getResponse().getStatusLine().getStatusCode() == 404) { return Collections.emptyMap(); } throw e; } final String aliasedIndex = getIndices(indexName).stream().findFirst().orElse(null); try (final InputStream inputStream = response.getEntity().getContent()) { final Map values; if (getVersion() < 7) { values = this.mapper.readValue(inputStream, new TypeReference>() { }); } else { final Map res; res = this.mapper.readValue(inputStream, new TypeReference>() { }); values = new HashMap<>(); for (final Entry entry : res.entrySet()) { final ElasticMappings mappings = new ElasticMappings(); mappings.setMappings(new HashMap<>()); if (aliasedIndex != null && aliasedIndex.equals(entry.getKey())) { mappings.getMappings().put(aliasedIndex, entry.getValue().getMappings()); values.put(aliasedIndex, mappings); } else { mappings.getMappings().put(indexName, entry.getValue().getMappings()); values.put(entry.getKey(), mappings); } } } final Map mappings; if (values.containsKey(indexName)) { mappings = values.get(indexName).getMappings(); } else { if (values.containsKey(aliasedIndex)) { mappings = values.get(aliasedIndex).getMappings(); } else if (!values.isEmpty()) { mappings = values.values().iterator().next().getMappings(); } else { LOGGER.severe("No types found for index/alias " + indexName); mappings = Collections.emptyMap(); } } return mappings; } } @Override public ElasticResponse search(String searchIndices, String type, ElasticRequest request) throws IOException { final StringBuilder pathBuilder = new StringBuilder("/" + 
searchIndices); if (getVersion() < 7) { pathBuilder.append("/" + type); } pathBuilder.append("/_search"); final Map requestBody = new HashMap<>(); if (request.getSize() != null) { requestBody.put("size", request.getSize()); } if (request.getFrom() != null) { requestBody.put("from", request.getFrom()); } if (request.getScroll() != null) { pathBuilder.append("?scroll=").append(request.getScroll()).append("s"); } final List sourceIncludes = request.getSourceIncludes(); if (sourceIncludes.size() == 1) { requestBody.put("_source", sourceIncludes.get(0)); } else if (!sourceIncludes.isEmpty()) { requestBody.put("_source", sourceIncludes); } if (!request.getFields().isEmpty()) { final String key = getVersion() >= 5 ? "stored_fields" : "fields"; requestBody.put(key, request.getFields()); } if (!request.getSorts().isEmpty()) { requestBody.put("sort", request.getSorts()); } if (request.getQuery() != null) { requestBody.put("query", request.getQuery()); } if (request.getAggregations() != null) { requestBody.put("aggregations", request.getAggregations()); } return parseResponse(performRequest("POST", pathBuilder.toString(), requestBody)); } private Response performRequest(String method, String path, Map requestBody, boolean isAdmin) throws IOException { final HttpEntity entity; if (requestBody != null) { final byte[] data = this.mapper.writeValueAsBytes(requestBody); entity = new ByteArrayEntity(data, ContentType.APPLICATION_JSON); } else { entity = null; } if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Method: " + method); LOGGER.fine("Path: " + path); final String requestString = this.mapper.writerWithDefaultPrettyPrinter().writeValueAsString(requestBody); LOGGER.fine("RequestBody: " + requestString); } final RestClient client = isAdmin || this.proxyClient == null ? 
this.client : this.proxyClient; final Request request = new Request(method, path); request.setEntity(entity); if (!isAdmin && enableRunAs) { final SecurityContext ctx = SecurityContextHolder.getContext(); final Authentication auth = ctx.getAuthentication(); if (auth == null) { throw new IllegalStateException("Authentication could not be determined!"); } if (!auth.isAuthenticated()) { throw new IllegalStateException(String.format("User is not authenticated: %s", auth.getName())); } final RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder(); optionsBuilder.addHeader(RUN_AS, auth.getName()); request.setOptions(optionsBuilder); LOGGER.fine(String.format("Performing request on behalf of user %s", auth.getName())); } else { LOGGER.fine(String.format("Performing request with %s credentials", isAdmin ? "user" : "proxy")); } final Response response = client.performRequest(request); if (response.getStatusLine().getStatusCode() >= 400) { throw new IOException("Error executing request: " + response.getStatusLine().getReasonPhrase()); } return response; } Response performRequest(String method, String path, Map requestBody) throws IOException { return performRequest(method, path, requestBody, false); } private ElasticResponse parseResponse(final Response response) throws IOException { try (final InputStream inputStream = response.getEntity().getContent()) { return this.mapper.readValue(inputStream, ElasticResponse.class); } } @Override public ElasticResponse scroll(String scrollId, Integer scrollTime) throws IOException { final String path = "/_search/scroll"; final Map requestBody = new HashMap<>(); requestBody.put("scroll_id", scrollId); requestBody.put("scroll", scrollTime + "s"); return parseResponse(performRequest("POST", path, requestBody)); } @Override public void clearScroll(Set scrollIds) throws IOException { final String path = "/_search/scroll"; if (!scrollIds.isEmpty()) { final Map requestBody = new HashMap<>(); requestBody.put("scroll_id", 
scrollIds); performRequest("DELETE", path, requestBody); } } @Override public void close() throws IOException { LOGGER.fine("Closing proxyClient: " + this.client); try { this.client.close(); } finally { if (this.proxyClient != null) { LOGGER.fine("Closing proxyClient: " + this.proxyClient); this.proxyClient.close(); } } } @SuppressWarnings("unchecked") public static void removeMapping(String parent, String key, Map data, String currentParent) { Iterator> it = data.entrySet().iterator(); while (it.hasNext()) { Entry entry = it.next(); if (Objects.equals(currentParent, parent) && entry.getKey().equals(key)) { it.remove(); } else if (entry.getValue() instanceof Map) { removeMapping(parent, key, (Map) entry.getValue(), entry.getKey()); } else if (entry.getValue() instanceof List) { ((List) entry.getValue()).stream() .filter(item -> item instanceof Map) .forEach(item -> removeMapping(parent, key, (Map) item, currentParent)); } } } private Set getIndices(String alias) { Set indices; try { final Response response = performRequest("GET", "/_alias/" + alias, null, true); try (final InputStream inputStream = response.getEntity().getContent()) { final Map result; result = this.mapper.readValue(inputStream, new TypeReference>() {}); indices = result.keySet(); } } catch (IOException e) { indices = new HashSet<>(); } return indices; } } ================================================ FILE: gt-elasticsearch/src/main/java/mil/nga/giat/data/elasticsearch/TotalDeserializer.java ================================================ package mil.nga.giat.data.elasticsearch; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import com.fasterxml.jackson.databind.exc.MismatchedInputException; import java.io.IOException; public class TotalDeserializer extends 
StdDeserializer { public TotalDeserializer() { this(null); } public TotalDeserializer(Class vc) { super(vc); } @Override public Long deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { try { return jsonParser.readValueAs(Long.class); } catch (MismatchedInputException e) { JsonNode node = jsonParser.getCodec().readTree(jsonParser); return node.get("value").longValue(); } } } ================================================ FILE: gt-elasticsearch/src/main/resources/META-INF/services/org.geotools.data.DataStoreFactorySpi ================================================ mil.nga.giat.data.elasticsearch.ElasticDataStoreFactory ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticAggregationReaderTest.java ================================================ package mil.nga.giat.data.elasticsearch; import org.geotools.data.DataUtilities; import org.geotools.data.store.ContentState; import org.geotools.feature.SchemaException; import org.junit.Before; import org.junit.Test; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import static org.junit.Assert.*; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; public class ElasticAggregationReaderTest { private ContentState state; private List hits; private Map aggregations; private ElasticFeatureReader reader; private SimpleFeature feature; private ObjectMapper mapper; @Before public void setup() throws SchemaException { SimpleFeatureType featureType = DataUtilities.createType("test", 
"name:String,_aggregation:java.util.HashMap"); state = new ContentState(null); state.setFeatureType(featureType); hits = new ArrayList<>(); aggregations = new LinkedHashMap<>(); mapper = new ObjectMapper(); } @Test public void testNoAggregations() { assertFalse((new ElasticFeatureReader(state, hits, aggregations, 0)).hasNext()); } @Test public void testBuckets() throws IOException { ElasticAggregation aggregation = new ElasticAggregation(); aggregation.setBuckets(new ArrayList<>()); aggregations.put("test", aggregation); assertFalse((new ElasticFeatureReader(state, hits, aggregations, 0)).hasNext()); aggregation.setBuckets(ImmutableList.of(ImmutableMap.of("key1","value1"), ImmutableMap.of("key2","value2"))); reader = new ElasticFeatureReader(state, hits, aggregations, 0); assertTrue(reader.hasNext()); feature = reader.next(); assertNotNull(feature.getAttribute("_aggregation")); assertEquals(ImmutableSet.of("key1"), byteArrayToMap(feature.getAttribute("_aggregation")).keySet()); assertTrue(reader.hasNext()); feature = reader.next(); assertNotNull(feature.getAttribute("_aggregation")); assertEquals(ImmutableSet.of("key2"), byteArrayToMap(feature.getAttribute("_aggregation")).keySet()); assertFalse(reader.hasNext()); } @Test public void testMultipleAggregations() throws IOException { ElasticAggregation aggregation = new ElasticAggregation(); aggregation.setBuckets(ImmutableList.of(ImmutableMap.of("key1","value1"))); aggregations.put("test", aggregation); aggregation = new ElasticAggregation(); aggregation.setBuckets(ImmutableList.of(ImmutableMap.of("key2","value2"))); aggregations.put("test2", aggregation); reader = new ElasticFeatureReader(state, hits, aggregations, 0); assertTrue(reader.hasNext()); feature = reader.next(); assertNotNull(feature.getAttribute("_aggregation")); assertEquals(ImmutableSet.of("key1"), byteArrayToMap(feature.getAttribute("_aggregation")).keySet()); assertFalse(reader.hasNext()); } private Map byteArrayToMap(Object bytes) throws IOException 
{ return mapper.readValue((byte[]) bytes, new TypeReference>() {}); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticAttributeTest.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; import static org.junit.Assert.*; import java.util.Map; import org.junit.Before; import org.junit.Test; import mil.nga.giat.data.elasticsearch.ElasticAttribute.ElasticGeometryType; public class ElasticAttributeTest { private ElasticAttribute attr; private String name; private String shortName; private String customName; private String normalizedName; private boolean useShortName; private Class type; private ElasticGeometryType geometryType; private boolean use; private boolean defaultGeometry; private int srid; private int order; private String dateFormat; private boolean analyzed; private boolean stored; private boolean nested; @Before public void setup() { name = "theName"; attr = new ElasticAttribute(name); shortName = "name"; useShortName = true; customName = "XML Custom Name"; normalizedName = "_XML_Custom_Name"; type = Map.class; geometryType = ElasticGeometryType.GEO_SHAPE; use = true; defaultGeometry = true; srid = 10; order = 1; dateFormat = "yyyy-mm-dd"; analyzed = true; stored = true; nested = true; } @Test public void testAttributes() { attr.setShortName(shortName); attr.setUseShortName(useShortName); attr.setCustomName(customName); attr.setType(type); attr.setGeometryType(geometryType); attr.setUse(use); attr.setDefaultGeometry(defaultGeometry); attr.setSrid(srid); attr.setOrder(order); attr.setDateFormat(dateFormat); attr.setAnalyzed(analyzed); attr.setStored(stored); attr.setNested(nested); assertEquals(attr.getName(), name); assertEquals(attr.getShortName(), shortName); assertEquals(attr.getUseShortName(), useShortName); 
assertEquals(attr.getCustomName(), normalizedName); assertEquals(attr.getType(), type); assertEquals(attr.getGeometryType(), geometryType); assertEquals(attr.isUse(), use); assertEquals(attr.isDefaultGeometry(), defaultGeometry); assertEquals(attr.getSrid(), srid, 1e-10); assertEquals(attr.getOrder(), Integer.valueOf(order)); assertEquals(attr.getDateFormat(), dateFormat); assertEquals(attr.getAnalyzed(), analyzed); assertEquals(attr.isStored(), stored); assertEquals(attr.isNested(), nested); } @Test public void testDisplayName() { assertEquals(attr.getDisplayName(), name); attr.setShortName("name"); attr.setUseShortName(true); assertEquals("name", attr.getDisplayName()); } @Test public void testHashCode() { assertEquals(attr.hashCode(), (new ElasticAttribute("theName")).hashCode()); assertTrue(attr.hashCode()!=(new ElasticAttribute("name")).hashCode()); } @Test public void testEquals() { assertEquals(attr, new ElasticAttribute("theName")); assertTrue(!attr.equals(new ElasticAttribute("name"))); } @Test public void testClone() { assertEquals(attr, new ElasticAttribute(attr)); } @Test public void testCompare() { ElasticAttribute other = new ElasticAttribute("other"); attr.setOrder(1); other.setOrder(2); assertEquals(-1, attr.compareTo(other)); attr.setOrder(3); other.setOrder(2); assertEquals(1, attr.compareTo(other)); attr.setOrder(null); other.setOrder(1); assertEquals(1, attr.compareTo(other)); attr.setOrder(1); other.setOrder(null); assertEquals(-1, attr.compareTo(other)); other = new ElasticAttribute("zAfter"); attr.setOrder(null); other.setOrder(null); assertTrue(attr.compareTo(other) < 0); other = new ElasticAttribute("before"); attr.setOrder(1); other.setOrder(1); assertTrue(attr.compareTo(other) > 0); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticDataStoreFinderIT.java ================================================ /* * GeoTools - The Open Source Java GIS Toolkit * 
http://geotools.org * * (C) 2014, Open Source Geospatial Foundation (OSGeo) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. */ package mil.nga.giat.data.elasticsearch; import java.io.IOException; import java.io.Serializable; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Logger; import java.util.stream.Collectors; import com.google.common.collect.ImmutableMap; import org.apache.http.HttpHost; import org.elasticsearch.client.Node; import org.geotools.data.DataStore; import org.junit.Test; import com.google.common.collect.ImmutableList; import static org.junit.Assert.*; public class ElasticDataStoreFinderIT extends ElasticTestSupport { private static final Logger LOGGER = org.geotools.util.logging.Logging .getLogger(ElasticDataStoreFinderIT.class); @Test public void testFactoryDefaults() throws IOException { Map params = createConnectionParams(); ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); dataStore = (ElasticDataStore) factory.createDataStore(params); ElasticDataStoreFactory fac = new ElasticDataStoreFactory(); assertEquals(fac.getDisplayName(), ElasticDataStoreFactory.DISPLAY_NAME); assertEquals(fac.getDescription(), ElasticDataStoreFactory.DESCRIPTION); assertArrayEquals(fac.getParametersInfo(), ElasticDataStoreFactory.PARAMS); assertNull(fac.getImplementationHints()); assertNull(fac.createNewDataStore(null)); } @Test public void testFactory() throws IOException { ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); assertTrue(factory.isAvailable()); Map map = new HashMap<>(); 
map.put(ElasticDataStoreFactory.HOSTNAME.key, "localhost"); map.put(ElasticDataStoreFactory.HOSTPORT.key, PORT); map.put(ElasticDataStoreFactory.INDEX_NAME.key, "sample"); DataStore store = factory.createDataStore(map); assertNotNull(store); assertTrue(store instanceof ElasticDataStore); } @Test public void testFactoryWithMissingRequired() throws IOException { ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); assertTrue(factory.isAvailable()); assertTrue(!factory.canProcess(ImmutableMap.of(ElasticDataStoreFactory.HOSTNAME.key, "localhost", ElasticDataStoreFactory.HOSTPORT.key, PORT))); assertTrue(!factory.canProcess(ImmutableMap.of(ElasticDataStoreFactory.HOSTNAME.key, "localhost", ElasticDataStoreFactory.INDEX_NAME.key, "test"))); assertTrue(!factory.canProcess(ImmutableMap.of(ElasticDataStoreFactory.HOSTNAME.key, "localhost"))); assertTrue(!factory.canProcess(ImmutableMap.of(ElasticDataStoreFactory.HOSTPORT.key, PORT, ElasticDataStoreFactory.INDEX_NAME.key, "test"))); assertTrue(!factory.canProcess(ImmutableMap.of(ElasticDataStoreFactory.HOSTPORT.key, PORT))); assertTrue(!factory.canProcess(ImmutableMap.of(ElasticDataStoreFactory.INDEX_NAME.key, "test"))); } @Test public void testCreateRestClient() throws IOException { assertEquals(ImmutableList.of(new HttpHost("localhost", PORT, "http")), getHosts("localhost")); assertEquals(ImmutableList.of(new HttpHost("localhost.localdomain", PORT, "http")), getHosts("localhost.localdomain")); assertEquals(ImmutableList.of(new HttpHost("localhost", 9201, "http")), getHosts("localhost:9201")); assertEquals(ImmutableList.of(new HttpHost("localhost.localdomain", 9201, "http")), getHosts("localhost.localdomain:9201")); assertEquals(ImmutableList.of(new HttpHost("localhost", PORT, "http")), getHosts("http://localhost")); assertEquals(ImmutableList.of(new HttpHost("localhost", 9200, "http")), getHosts("http://localhost:9200")); assertEquals(ImmutableList.of(new HttpHost("localhost", 9201, "http")), 
getHosts("http://localhost:9201")); assertEquals(ImmutableList.of(new HttpHost("localhost", PORT, "https")), getHosts("https://localhost")); assertEquals(ImmutableList.of(new HttpHost("localhost", 9200, "https")), getHosts("https://localhost:9200")); assertEquals(ImmutableList.of(new HttpHost("localhost", 9201, "https")), getHosts("https://localhost:9201")); assertEquals(ImmutableList.of( new HttpHost("somehost.somedomain", PORT, "http"), new HttpHost("anotherhost.somedomain", PORT, "http")), getHosts("somehost.somedomain:9200,anotherhost.somedomain:9200")); assertEquals(ImmutableList.of( new HttpHost("somehost.somedomain", PORT, "https"), new HttpHost("anotherhost.somedomain", PORT, "https")), getHosts("https://somehost.somedomain:9200,https://anotherhost.somedomain:9200")); assertEquals(ImmutableList.of( new HttpHost("somehost.somedomain", PORT, "https"), new HttpHost("anotherhost.somedomain", PORT, "https")), getHosts("https://somehost.somedomain:9200, https://anotherhost.somedomain:9200")); assertEquals(ImmutableList.of( new HttpHost("somehost.somedomain", PORT, "https"), new HttpHost("anotherhost.somedomain", PORT, "http")), getHosts("https://somehost.somedomain:9200,anotherhost.somedomain:9200")); } private List getHosts(String hosts) throws IOException { Map params = createConnectionParams(); params.put(ElasticDataStoreFactory.HOSTNAME.key, hosts); ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); return factory.createRestClient(params).getNodes().stream().map(Node::getHost).collect(Collectors.toList()); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticDataStoreIT.java ================================================ /* * GeoTools - The Open Source Java GIS Toolkit * http://geotools.org * * (C) 2014, Open Source Geospatial Foundation (OSGeo) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General 
Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. */ package mil.nga.giat.data.elasticsearch; import java.io.IOException; import java.io.Serializable; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.StatusLine; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.geotools.data.DataStore; import org.geotools.data.store.ContentFeatureSource; import org.junit.Test; import org.opengis.feature.simple.SimpleFeatureType; import com.google.common.collect.ImmutableMap; import static org.junit.Assert.*; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class ElasticDataStoreIT extends ElasticTestSupport { @Test public void testConstructionWithHostAndPortAndIndex() throws IOException { Map params = createConnectionParams(); String host = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.HOSTNAME, params); Integer port = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.HOSTPORT, params); String indexName = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.INDEX_NAME, params); DataStore dataStore = new ElasticDataStore(host, port, indexName); String[] typeNames = dataStore.getTypeNames(); assertTrue(typeNames.length > 0); } @Test public void testConstructionWithClientAndIndex() throws IOException { Map params = createConnectionParams(); String host = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.HOSTNAME, 
params); Integer port = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.HOSTPORT, params); String indexName = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.INDEX_NAME, params); HttpHost httpHost = new HttpHost(host, port, "http"); RestClient client = RestClient.builder(httpHost).build(); DataStore dataStore = new ElasticDataStore(client, indexName); String[] typeNames = dataStore.getTypeNames(); assertTrue(typeNames.length > 0); } @Test public void testConstructionWithProxyClientAndIndex() throws IOException { Map params = createConnectionParams(); String host = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.HOSTNAME, params); Integer port = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.HOSTPORT, params); String indexName = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.INDEX_NAME, params); HttpHost httpHost = new HttpHost(host, port, "http"); RestClient client = RestClient.builder(httpHost).build(); DataStore dataStore = new ElasticDataStore(client, client, indexName, false); String[] typeNames = dataStore.getTypeNames(); assertTrue(typeNames.length > 0); } @Test(expected=IOException.class) public void testConstructionWithBadClient() throws IOException { Map params = createConnectionParams(); String indexName = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.INDEX_NAME, params); RestClient mockClient = mock(RestClient.class); Response mockResponse = mock(Response.class); HttpEntity mockEntity = mock(HttpEntity.class); StatusLine mockStatusLine = mock(StatusLine.class); when(mockResponse.getEntity()).thenReturn(mockEntity); when(mockResponse.getStatusLine()).thenReturn(mockStatusLine); when(mockStatusLine.getStatusCode()).thenReturn(400); when(mockClient.performRequest(any(Request.class))).thenReturn(mockResponse); new ElasticDataStore(mockClient, indexName); } @Test(expected=IOException.class) public void testConstructionWithBadProxyClient() throws IOException { Map params = createConnectionParams(); 
String indexName = ElasticDataStoreFactory.getValue(ElasticDataStoreFactory.INDEX_NAME, params); RestClient mockClient = mock(RestClient.class); Response mockResponse = mock(Response.class); HttpEntity mockEntity = mock(HttpEntity.class); StatusLine mockStatusLine = mock(StatusLine.class); when(mockResponse.getEntity()).thenReturn(mockEntity); when(mockResponse.getStatusLine()).thenReturn(mockStatusLine); when(mockClient.performRequest(any(Request.class))).thenReturn(mockResponse); final AtomicInteger count = new AtomicInteger(0); when(mockStatusLine.getStatusCode()).thenAnswer((invocation) -> count.getAndIncrement() == 0 ? 200 : 400 ); new ElasticDataStore(mockClient, mockClient, indexName, false); } @Test public void testGetNames() throws IOException { Map params = createConnectionParams(); ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); DataStore dataStore = factory.createDataStore(params); String[] typeNames = dataStore.getTypeNames(); assertTrue(typeNames.length > 0); } @Test public void testGetNamesByAlias() throws IOException { Map params = createConnectionParams(); params.put(ElasticDataStoreFactory.INDEX_NAME.key, indexName + "_alias"); ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); DataStore dataStore = factory.createDataStore(params); String[] typeNames = dataStore.getTypeNames(); assertTrue(typeNames.length > 0); } @Test public void testLayerConfigClone() { ElasticLayerConfiguration layerConfig = new ElasticLayerConfiguration("d"); layerConfig.setLayerName("ln"); layerConfig.getAttributes().add(new ElasticAttribute("a1")); ElasticLayerConfiguration layerConfig2 = new ElasticLayerConfiguration(layerConfig); assertEquals(layerConfig.getDocType(), layerConfig2.getDocType()); assertEquals(layerConfig.getLayerName(), layerConfig2.getLayerName()); assertEquals(layerConfig.getAttributes(), layerConfig2.getAttributes()); } @Test public void testSchema() throws IOException { Map params = createConnectionParams(); 
ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); ElasticDataStore dataStore = (ElasticDataStore) factory.createDataStore(params); ContentFeatureSource featureSource = dataStore.getFeatureSource(dataStore.getTypeNames()[0]); SimpleFeatureType schema = featureSource.getSchema(); assertTrue(schema.getAttributeCount() > 0); assertNotNull(schema.getDescriptor("speed_is")); } @Test public void testSchemaWithValidCustomName() throws Exception { init(); Map params = createConnectionParams(); ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); ElasticDataStore dataStore = (ElasticDataStore) factory.createDataStore(params); ElasticLayerConfiguration config2 = new ElasticLayerConfiguration(config); config2.setLayerName("fake"); dataStore.setLayerConfiguration(config2); ContentFeatureSource featureSource = dataStore.getFeatureSource("fake"); SimpleFeatureType schema = featureSource.getSchema(); assertTrue(schema.getAttributeCount() > 0); assertNotNull(schema.getDescriptor("speed_is")); } @Test public void testIsAnalyzed() { assertFalse(ElasticDataStore.isAnalyzed(new HashMap<>())); assertFalse(ElasticDataStore.isAnalyzed(ImmutableMap.of("type", "keyword"))); assertFalse(ElasticDataStore.isAnalyzed(ImmutableMap.of("type", ImmutableMap.of("type", "keyword")))); assertFalse(ElasticDataStore.isAnalyzed(ImmutableMap.of("type", "not_valid"))); assertTrue(ElasticDataStore.isAnalyzed(ImmutableMap.of("type", "text"))); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticDatastoreFactoryTest.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.data.elasticsearch; import org.apache.http.HttpHost; import org.apache.http.auth.AuthScope; import org.apache.http.auth.Credentials; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.config.RequestConfig; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.geotools.data.DataStore; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import java.io.IOException; import java.io.Serializable; import java.io.UncheckedIOException; import java.security.KeyStoreException; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ThreadFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class ElasticDatastoreFactoryTest { private ElasticDataStoreFactory dataStoreFactory; private RestClientBuilder clientBuilder; private ArgumentCaptor hostsCaptor; private ArgumentCaptor configCallbackCaptor; private ArgumentCaptor credentialsProviderCaptor; private HttpAsyncClientBuilder httpClientBuilder; private ArgumentCaptor threadFactoryCaptor; private ArgumentCaptor requestConfigCallbackCaptor; private RequestConfig.Builder requestConfigBuilder; private Map params; @Before public void setUp() throws IOException { dataStoreFactory = Mockito.spy(new ElasticDataStoreFactory()); clientBuilder = mock(RestClientBuilder.class); hostsCaptor = ArgumentCaptor.forClass(HttpHost[].class); 
Mockito.doReturn(clientBuilder).when(dataStoreFactory).createClientBuilder(hostsCaptor.capture()); final RestClient restClient = mock(RestClient.class); when(clientBuilder.build()).thenReturn(restClient); final DataStore dataStore = mock(DataStore.class); Mockito.doReturn(dataStore).when(dataStoreFactory).createDataStore(any(RestClient.class), any(), anyMap()); configCallbackCaptor = ArgumentCaptor.forClass(RestClientBuilder.HttpClientConfigCallback.class); when(clientBuilder.setHttpClientConfigCallback(configCallbackCaptor.capture())).thenReturn(clientBuilder); httpClientBuilder = mock(HttpAsyncClientBuilder.class); credentialsProviderCaptor = ArgumentCaptor.forClass(CredentialsProvider.class); when(httpClientBuilder.setDefaultCredentialsProvider(credentialsProviderCaptor.capture())).thenReturn(httpClientBuilder); threadFactoryCaptor = ArgumentCaptor.forClass(ThreadFactory.class); when(httpClientBuilder.setThreadFactory(threadFactoryCaptor.capture())).thenReturn(httpClientBuilder); requestConfigCallbackCaptor = ArgumentCaptor.forClass(RestClientBuilder.RequestConfigCallback.class); when(clientBuilder.setRequestConfigCallback(requestConfigCallbackCaptor.capture())).thenReturn(clientBuilder); requestConfigBuilder = mock(RequestConfig.Builder.class); when(requestConfigBuilder.setAuthenticationEnabled(true)).thenReturn(requestConfigBuilder); params = getParams("localhost", 9200, "admin", "proxy"); ElasticDataStoreFactory.httpThreads.set(1); } private Map getParams(String hosts, int port, String user, String proxyUser) { final Map params = new HashMap<>(); params.put(ElasticDataStoreFactory.HOSTNAME.key, hosts); params.put(ElasticDataStoreFactory.HOSTPORT.key, port); params.put(ElasticDataStoreFactory.INDEX_NAME.key, "test"); if (user != null) { params.put(ElasticDataStoreFactory.USER.key, user); params.put(ElasticDataStoreFactory.PASSWD.key, user); } if (proxyUser != null) { params.put(ElasticDataStoreFactory.PROXY_USER.key, proxyUser); 
params.put(ElasticDataStoreFactory.PROXY_PASSWD.key, proxyUser); } return params; } @Test public void testBuildClient() throws IOException { assertNotNull(dataStoreFactory.createDataStore(params)); verify(clientBuilder, times(2)).build(); assertEquals(1, hostsCaptor.getValue().length); assertEquals("localhost", hostsCaptor.getValue()[0].getHostName()); assertEquals(9200, hostsCaptor.getValue()[0].getPort()); assertEquals(2, configCallbackCaptor.getAllValues().size()); configCallbackCaptor.getAllValues().get(0).customizeHttpClient(httpClientBuilder); Credentials credentials = credentialsProviderCaptor.getValue().getCredentials(new AuthScope("localhost", 9200)); assertNotNull(credentials); assertEquals("admin", credentials.getUserPrincipal().getName()); assertEquals("admin", credentials.getPassword()); configCallbackCaptor.getAllValues().get(1).customizeHttpClient(httpClientBuilder); Credentials proxyCredentials = credentialsProviderCaptor.getValue().getCredentials(new AuthScope("localhost", 9200)); assertNotNull(proxyCredentials); assertEquals("proxy", proxyCredentials.getUserPrincipal().getName()); assertEquals("proxy", proxyCredentials.getPassword()); assertEquals(2, threadFactoryCaptor.getAllValues().size()); ThreadFactory threadFactory = threadFactoryCaptor.getAllValues().get(0); Thread thread = threadFactory.newThread(mock(Runnable.class)); assertEquals("esrest-asynchttp-ADMIN-1", thread.getName()); ThreadFactory proxyThreadFactory = threadFactoryCaptor.getAllValues().get(1); Thread proxyThread = proxyThreadFactory.newThread(mock(Runnable.class)); assertEquals("esrest-asynchttp-PROXY_USER-2", proxyThread.getName()); assertNotNull(requestConfigCallbackCaptor.getValue().customizeRequestConfig(requestConfigBuilder)); } @Test public void testBuildClientWithoutProxy() throws IOException { params = getParams("localhost", 9200, "admin", null); assertNotNull(dataStoreFactory.createDataStore(params)); verify(clientBuilder, times(1)).build(); assertEquals(1, 
hostsCaptor.getValue().length); assertEquals("localhost", hostsCaptor.getValue()[0].getHostName()); assertEquals(9200, hostsCaptor.getValue()[0].getPort()); assertEquals(1, configCallbackCaptor.getAllValues().size()); configCallbackCaptor.getAllValues().get(0).customizeHttpClient(httpClientBuilder); Credentials credentials = credentialsProviderCaptor.getValue().getCredentials(new AuthScope("localhost", 9200)); assertNotNull(credentials); assertEquals("admin", credentials.getUserPrincipal().getName()); assertEquals("admin", credentials.getPassword()); assertEquals(1, threadFactoryCaptor.getAllValues().size()); ThreadFactory threadFactory = threadFactoryCaptor.getAllValues().get(0); Thread thread = threadFactory.newThread(mock(Runnable.class)); assertEquals("esrest-asynchttp-ADMIN-1", thread.getName()); assertNotNull(requestConfigCallbackCaptor.getValue().customizeRequestConfig(requestConfigBuilder)); } @Test public void testBuildClientWithoutAuth() throws IOException { params = getParams("localhost", 9200, null, null); assertNotNull(dataStoreFactory.createDataStore(params)); verify(clientBuilder, times(1)).build(); assertEquals(1, hostsCaptor.getValue().length); assertEquals("localhost", hostsCaptor.getValue()[0].getHostName()); assertEquals(9200, hostsCaptor.getValue()[0].getPort()); assertEquals(1, configCallbackCaptor.getAllValues().size()); configCallbackCaptor.getAllValues().get(0).customizeHttpClient(httpClientBuilder); assertEquals(0, credentialsProviderCaptor.getAllValues().size()); assertEquals(1, threadFactoryCaptor.getAllValues().size()); ThreadFactory threadFactory = threadFactoryCaptor.getAllValues().get(0); Thread thread = threadFactory.newThread(mock(Runnable.class)); assertEquals("esrest-asynchttp-ADMIN-1", thread.getName()); } @Test public void testBuildClientWithMultipleHosts() throws IOException { params = getParams("localhost1,localhost2", 9201, "admin", "proxy"); assertNotNull(dataStoreFactory.createDataStore(params)); verify(clientBuilder, 
times(2)).build(); assertEquals(2, hostsCaptor.getValue().length); assertEquals("localhost1", hostsCaptor.getValue()[0].getHostName()); assertEquals(9201, hostsCaptor.getValue()[0].getPort()); assertEquals("localhost2", hostsCaptor.getValue()[1].getHostName()); assertEquals(9201, hostsCaptor.getValue()[1].getPort()); configCallbackCaptor.getValue().customizeHttpClient(httpClientBuilder); assertNotNull(credentialsProviderCaptor.getValue().getCredentials(new AuthScope("localhost1", 9201))); assertNotNull(credentialsProviderCaptor.getValue().getCredentials(new AuthScope("localhost2", 9201))); } @Test public void testCreateClientbuilder() { ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); HttpHost[] hosts = new HttpHost[] { new HttpHost("localhost", 9200)}; assertNotNull(factory.createClientBuilder(hosts)); } @Test public void testBuildClientWithSslRejectUnauthorizedDisabled() throws IOException { params.put(ElasticDataStoreFactory.SSL_REJECT_UNAUTHORIZED.key, false); assertNotNull(dataStoreFactory.createDataStore(params)); assertTrue(configCallbackCaptor.getAllValues().size() > 0); configCallbackCaptor.getAllValues().get(0).customizeHttpClient(httpClientBuilder); verify(httpClientBuilder, times(1)).setSSLContext(any()); } @Test(expected=UncheckedIOException.class) public void testBuildClientWithSslRejectUnauthorizedDisabledAndInvalidSSLContext() throws IOException { params.put(ElasticDataStoreFactory.SSL_REJECT_UNAUTHORIZED.key, false); assertNotNull(dataStoreFactory.createDataStore(params)); assertTrue(configCallbackCaptor.getAllValues().size() > 0); when(httpClientBuilder.setSSLContext(any())).thenThrow(KeyStoreException.class); configCallbackCaptor.getAllValues().get(0).customizeHttpClient(httpClientBuilder); } @Test(expected=IOException.class) public void testBuildClientWithInvalidHost() throws IOException { params = getParams(":", 9200, null, null); dataStoreFactory.createDataStore(params); } } ================================================ 
FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticFeatureFilterIT.java ================================================ /* * GeoTools - The Open Source Java GIS Toolkit * http://geotools.org * * (C) 2014, Open Source Geospatial Foundation (OSGeo) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. */ package mil.nga.giat.data.elasticsearch; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; import java.util.*; import org.geotools.data.Query; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.data.store.ContentEntry; import org.geotools.feature.NameImpl; import org.geotools.geometry.jts.ReferencedEnvelope; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.*; import org.opengis.feature.Property; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.GeometryDescriptor; import org.opengis.feature.type.Name; import org.opengis.filter.And; import org.opengis.filter.Filter; import org.opengis.filter.FilterFactory; import org.opengis.filter.Id; import org.opengis.filter.Not; import org.opengis.filter.Or; import org.opengis.filter.PropertyIsBetween; import org.opengis.filter.PropertyIsEqualTo; import org.opengis.filter.PropertyIsGreaterThan; import org.opengis.filter.PropertyIsGreaterThanOrEqualTo; import org.opengis.filter.PropertyIsLessThan; import org.opengis.filter.PropertyIsLessThanOrEqualTo; import 
org.opengis.filter.PropertyIsLike; import org.opengis.filter.PropertyIsNotEqualTo; import org.opengis.filter.PropertyIsNull; import org.opengis.filter.sort.SortBy; import org.opengis.filter.sort.SortOrder; import org.opengis.filter.spatial.BBOX; import mil.nga.giat.data.elasticsearch.ElasticDataStore.ArrayEncoding; public class ElasticFeatureFilterIT extends ElasticTestSupport { @Test public void testSchema() throws Exception { init(); SimpleFeatureType schema = featureSource.getSchema(); assertNotNull(schema); assertNotNull(schema.getGeometryDescriptor()); assertTrue(schema.getDescriptor("geo") instanceof GeometryDescriptor); assertTrue(schema.getDescriptor("geo2") instanceof GeometryDescriptor); assertTrue(schema.getDescriptor("geo3") instanceof GeometryDescriptor); assertTrue(schema.getDescriptor("geo5") instanceof GeometryDescriptor); } @Test @Ignore public void testSchemaWithoutLayerConfig() throws Exception { init(); ElasticFeatureSource featureSource = new ElasticFeatureSource(new ContentEntry(dataStore, new NameImpl("invalid")),null); SimpleFeatureType schema = featureSource.getSchema(); assertNotNull(schema); assertEquals(0, schema.getAttributeCount()); } @Test public void testSchemaWithShortName() throws Exception { init(); ElasticLayerConfiguration layerConfig = dataStore.getLayerConfigurations().get("active"); for (ElasticAttribute attribute : layerConfig.getAttributes()) { attribute.setUseShortName(true); } SimpleFeatureType schema = featureSource.getSchema(); assertNotNull(schema); assertNotNull(schema.getDescriptor("hejda")); } @Test @Ignore public void testSchemaWithInvalidSrid() throws Exception { init(); ElasticLayerConfiguration layerConfig = dataStore.getLayerConfigurations().get("active"); for (ElasticAttribute attribute : layerConfig.getAttributes()) { attribute.setSrid(-1); } SimpleFeatureType schema = featureSource.getSchema(); assertNotNull(schema); assertNull(schema.getGeometryDescriptor()); assertNull(schema.getDescriptor("geo")); } @Test 
public void testCount() throws Exception { init(); assertEquals(11, featureSource.getCount(Query.ALL)); } @Test public void testBounds() throws Exception { init(); ReferencedEnvelope bounds = featureSource.getBounds(); assertEquals(0L, Math.round(bounds.getMinX())); assertEquals(0L, Math.round(bounds.getMinY())); assertEquals(24, Math.round(bounds.getMaxX())); assertEquals(44, Math.round(bounds.getMaxY())); } @Test public void testCountWithIsEqualFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo filter = ff.equals(ff.property("vendor_s"), ff.literal("D-Link")); Query query = new Query(); query.setFilter(filter); assertEquals(4, featureSource.getCount(query)); } @Test public void testCountWithIsNotEqualFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsNotEqualTo filter = ff.notEqual(ff.property("vendor_s"), ff.literal("D-Link")); Query query = new Query(); query.setFilter(filter); assertEquals(7, featureSource.getCount(query)); } @Test public void testCountWithOffsetLimit() throws Exception { init(); Query query = new Query(); query.setStartIndex(5); query.setMaxFeatures(11); assertEquals(6, featureSource.getCount(query)); } @Test public void testGetFeaturesWithAndLogicFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo property = ff.equals(ff.property("standard_ss"), ff.literal("IEEE 802.11b")); BBOX bbox = ff.bbox("geo", -1, -1, 10, 10, "EPSG:" + SOURCE_SRID); And filter = ff.and(property, bbox); SimpleFeatureCollection features = featureSource.getFeatures(filter); assertEquals(3, features.size()); } @Test public void testGetFeaturesWithORLogicFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo property1 = ff.equals(ff.property("vendor_s"), ff.literal("D-Link")); PropertyIsEqualTo property2 = ff.equals(ff.property("vendor_s"), ff.literal("Linksys")); Or filter = 
ff.or(property1, property2); SimpleFeatureCollection features = featureSource.getFeatures(filter); assertEquals(6, features.size()); SimpleFeatureIterator iterator = features.features(); while (iterator.hasNext()) { SimpleFeature f = iterator.next(); assertTrue(f.getAttribute("vendor_s").equals("D-Link") || f.getAttribute("vendor_s").equals("Linksys")); } } @Test public void testGetFeaturesWithNOTLogicFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo property1 = ff.equals(ff.property("vendor_s"), ff.literal("D-Link")); Not filter = ff.not(property1); SimpleFeatureCollection features = featureSource.getFeatures(filter); assertEquals(7, features.size()); SimpleFeatureIterator iterator = features.features(); while (iterator.hasNext()) { SimpleFeature f = iterator.next(); assertTrue(!f.getAttribute("vendor_s").equals("D-Link")); } } @Test public void testGetFeaturesWithIdFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); Id id = ff.id(new HashSet<>(Arrays.asList(ff.featureId("01"), ff.featureId("07")))); SimpleFeatureCollection features = featureSource.getFeatures(id); assertEquals(2, features.size()); } @Test public void testGetFeaturesWithBetweenFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsBetween between = ff.between(ff.property("speed_is"), ff.literal(0), ff.literal(150)); SimpleFeatureCollection features = featureSource.getFeatures(between); assertEquals(9, features.size()); SimpleFeatureIterator iterator = features.features(); while (iterator.hasNext()) { SimpleFeature f = iterator.next(); boolean found = false; if (f.getAttribute("speed_is") instanceof List) { for (Object obj : (List) f.getAttribute("speed_is")) { int v = (Integer) obj; if (v >= 0 && v <= 150) { found = true; break; } } } else { int v = (Integer) f.getAttribute("speed_is"); found = (v >= 0 && v <= 150); } assertTrue(found); } between = 
ff.between(ff.property("speed_is"), ff.literal(160), ff.literal(300)); features = featureSource.getFeatures(between); assertEquals(5, features.size()); iterator = features.features(); while (iterator.hasNext()) { SimpleFeature f = iterator.next(); boolean found = false; if (f.getAttribute("speed_is") instanceof List) { for (Object obj : (List) f.getAttribute("speed_is")) { int v = (Integer) obj; if (v >= 160 && v <= 300) { found = true; break; } } } else { int v = (Integer) f.getAttribute("speed_is"); found = (v >= 160 && v <= 300); } assertTrue(found); } } @Test public void testGetFeaturesWithQuery() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo filter = ff.equals(ff.property("modem_b"), ff.literal(true)); Query query = new Query(); query.setPropertyNames(new String[] { "standard_ss", "security_ss" }); query.setFilter(filter); SimpleFeatureCollection features = featureSource.getFeatures(query); assertEquals(8, features.size()); try (SimpleFeatureIterator iterator = features.features()) { assertTrue(iterator.hasNext()); SimpleFeature feature = iterator.next(); assertEquals(2, feature.getAttributeCount()); String st = (String) feature.getAttribute("standard_ss"); // changed from "IEEE 802.11b" in SolrFeatureSourceTest assertTrue(st.contains("IEEE 802.11b")); } } @Test public void testReadStringArrayWithCsvStrategy() throws Exception { init(); dataStore.setArrayEncoding(ArrayEncoding.CSV); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo filter = ff.equals(ff.property("modem_b"), ff.literal(true)); SimpleFeatureCollection features = featureSource.getFeatures(filter); assertEquals(8, features.size()); try (SimpleFeatureIterator iterator = features.features()) { assertTrue(iterator.hasNext()); SimpleFeature feature = iterator.next(); String st = (String) feature.getAttribute("standard_ss"); // changed from "IEEE 802.11b" in SolrFeatureSourceTest assertTrue(URLDecoder.decode(st, 
StandardCharsets.UTF_8.toString()).startsWith("IEEE 802.11")); } } @Test public void testReadNumericArrayWithCsvStrategy() throws Exception { init(); dataStore.setArrayEncoding(ArrayEncoding.CSV); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsBetween between = ff.between(ff.property("speed_is"), ff.literal(160), ff.literal(300)); SimpleFeatureCollection features = featureSource.getFeatures(between); assertEquals(5, features.size()); SimpleFeatureIterator iterator = features.features(); while (iterator.hasNext()) { SimpleFeature f = iterator.next(); assertFalse(f.getAttribute("speed_is") instanceof List); } } @Test public void testGetFeaturesWithSort() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); SortBy sort = ff.sort("vendor_s", SortOrder.ASCENDING); Query query = new Query(); query.setSortBy(new SortBy[] { sort }); SimpleFeatureCollection features = featureSource.getFeatures(query); assertEquals(11, features.size()); SimpleFeatureIterator iterator = features.features(); SimpleFeature f; try { assertTrue(iterator.hasNext()); f = iterator.next(); assertEquals("Asus", f.getAttribute("vendor_s")); assertTrue(iterator.hasNext()); f = iterator.next(); assertEquals("Cisco", f.getAttribute("vendor_s")); assertTrue(iterator.hasNext()); f = iterator.next(); assertEquals("Cisco", f.getAttribute("vendor_s")); } finally { iterator.close(); } sort = ff.sort("vendor_s", SortOrder.DESCENDING); query.setSortBy(new SortBy[] { sort }); features = featureSource.getFeatures(query); iterator = features.features(); try { assertTrue(iterator.hasNext()); f = iterator.next(); assertEquals("TP-Link", f.getAttribute("vendor_s")); assertTrue(iterator.hasNext()); f = iterator.next(); assertEquals("Linksys", f.getAttribute("vendor_s")); assertTrue(iterator.hasNext()); f = iterator.next(); assertEquals("Linksys", f.getAttribute("vendor_s")); } finally { iterator.close(); } } @Test public void testGetFeaturesWithOffsetLimit() throws Exception { init(); 
Query q = new Query(featureSource.getSchema().getTypeName()); // no sorting, let's see if the database can use native one q.setStartIndex(1); q.setMaxFeatures(1); SimpleFeatureCollection features = featureSource.getFeatures(q); // check size assertEquals(1, features.size()); // check actual iteration try (SimpleFeatureIterator it = features.features()) { assertTrue(it.hasNext()); SimpleFeature f = it.next(); assertEquals(2, Integer.parseInt((String) f.getAttribute("id"))); assertFalse(it.hasNext()); } } @Test public void testNaturalSortingAsc() throws Exception { init(); Query q = new Query(featureSource.getSchema().getTypeName()); q.setSortBy(new SortBy[] { SortBy.NATURAL_ORDER }); SimpleFeatureIterator features = featureSource.getFeatures(q).features(); String prevId = null; while (features.hasNext()) { String currId = features.next().getID(); if (prevId != null) assertTrue(prevId.compareTo(currId) <= 0); prevId = currId; } features.close(); } @Test public void testNaturalSortingDesc() throws Exception { init(); Query q = new Query(featureSource.getSchema().getTypeName()); q.setSortBy(new SortBy[] { SortBy.REVERSE_ORDER }); SimpleFeatureIterator features = featureSource.getFeatures(q).features(); String prevId = null; while (features.hasNext()) { String currId = features.next().getID(); if (prevId != null) assertTrue(prevId.compareTo(currId) >= 0); prevId = currId; } features.close(); } @Test public void testGetFeaturesWithIsGreaterThanFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsGreaterThan f = ff.greater(ff.property("speed_is"), ff.literal(300)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(0, features.size()); } @Test public void testGetFeaturesWithIsGreaterThanOrEqualToFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsGreaterThanOrEqualTo f = ff.greaterOrEqual(ff.property("speed_is"), ff.literal(300)); SimpleFeatureCollection 
features = featureSource.getFeatures(f); assertEquals(5, features.size()); } @Test public void testGetFeaturesWithIsLessThanFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsLessThan f = ff.less(ff.property("speed_is"), ff.literal(150)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(0, features.size()); } @Test public void testGetFeaturesWithLessThanOrEqualToFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsLessThanOrEqualTo f = ff.lessOrEqual(ff.property("speed_is"), ff.literal(150)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(9, features.size()); } @Test public void testGetFeaturesWithIsLikeFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsLike f = ff.like(ff.property("standard_ss"), "IEEE 802.11?"); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(11, features.size()); } @Test public void testGetFeaturesWithIsNullFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsNull f = ff.isNull(ff.property("security_ss")); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); } @Test public void testBBOXFilterWithBBOXType() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.bbox("geo3", 12.5, 7.5, 14, 19, "epsg:4326"); SimpleFeatureCollection features = featureSource.getFeatures(f); assertCovered(features, 2, 5, 6); } @Test public void testOnlyStoredFields() throws Exception { init(); Name name = new NameImpl("active"); for (final ElasticAttribute attribute : dataStore.getElasticAttributes(name) ){ if (!attribute.isStored()) { attribute.setUse(false); } } assertEquals(11, featureSource.getCount(Query.ALL)); SimpleFeatureIterator features = featureSource.getFeatures().features(); for (int i=0; i<11; i++) { 
assertTrue(features.hasNext()); features.next(); } } @Test public void testOnlySourceFields() throws Exception { init(); Name name = new NameImpl("active"); for (final ElasticAttribute attribute : dataStore.getElasticAttributes(name) ){ if (attribute.isStored()) { attribute.setUse(false); } } featureSource = (ElasticFeatureSource) dataStore.getFeatureSource(TYPE_NAME); assertEquals(11, featureSource.getCount(Query.ALL)); SimpleFeatureIterator features = featureSource.getFeatures().features(); for (int i=0; i<11; i++) { assertTrue(features.hasNext()); features.next(); } } @Test public void testOnlyStoredFieldsWithSourceFiltering() throws Exception { init(); dataStore.setSourceFilteringEnabled(true); Name name = new NameImpl("active"); for (final ElasticAttribute attribute : dataStore.getElasticAttributes(name) ){ if (!attribute.isStored()) { attribute.setUse(false); } } assertEquals(11, featureSource.getCount(Query.ALL)); SimpleFeatureIterator features = featureSource.getFeatures().features(); for (int i=0; i<11; i++) { assertTrue(features.hasNext()); features.next(); } } @Test public void testOnlySourceFieldsWithSourceFiltering() throws Exception { init(); dataStore.setSourceFilteringEnabled(true); Name name = new NameImpl("active"); for (final ElasticAttribute attribute : dataStore.getElasticAttributes(name) ){ if (attribute.isStored()) { attribute.setUse(false); } } featureSource = (ElasticFeatureSource) dataStore.getFeatureSource(TYPE_NAME); assertEquals(11, featureSource.getCount(Query.ALL)); SimpleFeatureIterator features = featureSource.getFeatures().features(); for (int i=0; i<11; i++) { assertTrue(features.hasNext()); features.next(); } } /** * This test ensures that when specifying properties in a query with source filtering enabled you only get back the * properties specified. If properties are not specified or Query.ALL_PROPERTIES is used then you get everything. 
*/ @Test public void testFieldsWithSourceFiltering() throws Exception { init(); dataStore.setSourceFilteringEnabled(true); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo filter = ff.equals(ff.property("modem_b"), ff.literal(true)); Query query = new Query(); query.setFilter(filter); SimpleFeatureCollection features = featureSource.getFeatures(query); try(SimpleFeatureIterator iterator = features.features()) { assertTrue(iterator.hasNext()); SimpleFeature feature = iterator.next(); Assert.assertTrue(feature.getProperties().size() > 2); } // Specify Columns query.setPropertyNames(new String[] { "standard_ss", "security_ss" }); features = featureSource.getFeatures(query); try(SimpleFeatureIterator iterator = features.features()) { assertTrue(iterator.hasNext()); SimpleFeature feature = iterator.next(); Iterator propertyIterator = feature.getProperties().iterator(); Assert.assertEquals(query.getPropertyNames().length, feature.getProperties().size()); Assert.assertEquals(query.getPropertyNames()[0], propertyIterator.next().getName().getLocalPart()); Assert.assertEquals(query.getPropertyNames()[1], propertyIterator.next().getName().getLocalPart()); } // Specify All query.setProperties(Query.ALL_PROPERTIES); features = featureSource.getFeatures(query); try(SimpleFeatureIterator iterator = features.features()) { assertTrue(iterator.hasNext()); SimpleFeature feature = iterator.next(); Assert.assertTrue(feature.getProperties().size() > 2); } } @Test public void testGetFeaturesWithIsGreaterThanFilterOnObjectType() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsGreaterThan f = ff.greater(ff.property("object.hejda"), ff.literal(10)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(6, features.size()); } @Test public void testGetFeaturesWithIsGreaterThanFilterOnNestedType() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsGreaterThan f = 
ff.greater(ff.property("nested.hej"), ff.literal(10)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(8, features.size()); } @Test public void testGetFeaturesWithIsBetweenFilterOnObjectType() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsBetween f = ff.between(ff.property("object.hejda"), ff.literal(5), ff.literal(15)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(5, features.size()); } @Test public void testGetFeaturesWithIsBetweenFilterOnNestedType() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsBetween f = ff.between(ff.property("nested.hej"), ff.literal(5), ff.literal(15)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(10, features.size()); } @Test public void testGetFeaturesWithIsGreaterThanFilterOnNestedChildType() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsGreaterThan f = ff.greater(ff.property("nested.parent.child"), ff.literal("ba")); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(8, features.size()); } @Test public void testScrollSizesDoesntChangesOutputSize() throws Exception { init(); dataStore.setScrollSize(3L); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsGreaterThan f = ff.greater(ff.property("nested.parent.child"), ff.literal("ba")); List features = readFeatures(featureSource.getFeatures(f).features()); assertEquals(8, features.size()); } @Test public void testScrollTimeDoesntChangesOutputSize() throws Exception { init(); Integer initialScrollTime = dataStore.getScrollTime(); dataStore.setScrollTime(initialScrollTime * 10); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsGreaterThan f = ff.greater(ff.property("nested.parent.child"), ff.literal("ba")); List features = readFeatures(featureSource.getFeatures(f).features()); assertEquals(8, features.size()); } @Test public void 
testScrollEnabledDoesntChangesOutputSize() throws Exception { init(); dataStore.setScrollEnabled(true); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsGreaterThan f = ff.greater(ff.property("nested.parent.child"), ff.literal("ba")); List features = readFeatures(featureSource.getFeatures(f).features()); assertEquals(8, features.size()); } @Test public void testScrollHonorsMaxFeatures() throws Exception { init(); dataStore.setScrollSize(1L); Query q = new Query(); q.setMaxFeatures(7); List features = readFeatures(featureSource.getFeatures(q).features()); assertEquals(7, features.size()); } @Test(expected=NoSuchElementException.class) public void testScrollNoSuchElement() throws Exception { init(); dataStore.setScrollSize(1L); Query q = new Query(); q.setMaxFeatures(1); SimpleFeatureIterator it = featureSource.getFeatures(q).features(); assertTrue(it.hasNext()); it.next(); assertTrue(!it.hasNext()); it.next(); } @Test public void testDefaultMaxFeatures() throws Exception { init(); dataStore.setDefaultMaxFeatures(2); Query q = new Query(); List features = readFeatures(featureSource.getFeatures(q).features()); assertEquals(2, features.size()); } private void assertCovered(SimpleFeatureCollection features, Integer... ids) { assertEquals(ids.length, features.size()); Set s = new HashSet<>(Arrays.asList(ids)); SimpleFeatureIterator it = features.features(); while (it.hasNext()) { SimpleFeature f = it.next(); s.remove(Integer.parseInt(f.getAttribute("id").toString())); } assertTrue(s.isEmpty()); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticFilterTest.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.data.elasticsearch; import static org.junit.Assert.*; import static org.hamcrest.collection.IsIn.isOneOf; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import mil.nga.giat.data.elasticsearch.ElasticAttribute.ElasticGeometryType; import static mil.nga.giat.data.elasticsearch.ElasticConstants.ANALYZED; import static mil.nga.giat.data.elasticsearch.ElasticConstants.DATE_FORMAT; import static mil.nga.giat.data.elasticsearch.ElasticConstants.GEOMETRY_TYPE; import static mil.nga.giat.data.elasticsearch.ElasticConstants.MATCH_ALL; import static mil.nga.giat.data.elasticsearch.ElasticConstants.NESTED; import org.geotools.data.Query; import org.geotools.factory.CommonFactoryFinder; import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.simple.SimpleFeatureTypeBuilder; import org.geotools.filter.text.cql2.CQLException; import org.geotools.filter.text.ecql.ECQL; import org.geotools.temporal.object.DefaultInstant; import org.geotools.temporal.object.DefaultPeriod; import org.geotools.temporal.object.DefaultPosition; import org.geotools.util.factory.Hints; import org.junit.Before; import org.junit.Test; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.Point; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeDescriptor; import org.opengis.filter.And; import org.opengis.filter.ExcludeFilter; import org.opengis.filter.Filter; import org.opengis.filter.FilterFactory2; import org.opengis.filter.Id; import org.opengis.filter.IncludeFilter; import org.opengis.filter.Not; import org.opengis.filter.Or; import org.opengis.filter.PropertyIsBetween; import org.opengis.filter.PropertyIsEqualTo; import org.opengis.filter.PropertyIsGreaterThan; import 
org.opengis.filter.PropertyIsGreaterThanOrEqualTo; import org.opengis.filter.PropertyIsLessThan; import org.opengis.filter.PropertyIsLessThanOrEqualTo; import org.opengis.filter.PropertyIsLike; import org.opengis.filter.PropertyIsNotEqualTo; import org.opengis.filter.PropertyIsNull; import org.opengis.filter.expression.NilExpression; import org.opengis.filter.spatial.BBOX; import org.opengis.filter.spatial.Beyond; import org.opengis.filter.spatial.DWithin; import org.opengis.filter.spatial.Disjoint; import org.opengis.filter.spatial.Intersects; import org.opengis.filter.temporal.After; import org.opengis.filter.temporal.Begins; import org.opengis.filter.temporal.BegunBy; import org.opengis.filter.temporal.During; import org.opengis.filter.temporal.EndedBy; import org.opengis.filter.temporal.Ends; import org.opengis.filter.temporal.TContains; import org.opengis.filter.temporal.TEquals; import org.opengis.temporal.Instant; import org.opengis.temporal.Period; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; public class ElasticFilterTest { private FilterToElastic builder; private FilterFactory2 ff; private GeometryFactory gf; private SimpleFeatureType featureType; private Map parameters; private Query query; private DateFormat dateFormat; @Before public void setUp() { ff = CommonFactoryFinder.getFilterFactory2(); SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder(); typeBuilder.setName("test"); typeBuilder.add("stringAttr", String.class); typeBuilder.add("integerAttr", Integer.class); typeBuilder.add("longAttr", Long.class); typeBuilder.add("booleanAttr", Boolean.class); typeBuilder.add("doubleAttr", Double.class); typeBuilder.add("floatAttr", Float.class); typeBuilder.add("dateAttr", Date.class); AttributeDescriptor geoPointAtt; AttributeTypeBuilder geoPointAttBuilder = new 
AttributeTypeBuilder(); geoPointAttBuilder.setName("geo_point"); geoPointAttBuilder.setBinding(Point.class); geoPointAtt = geoPointAttBuilder.buildDescriptor("geo_point", geoPointAttBuilder.buildType()); geoPointAtt.getUserData().put(GEOMETRY_TYPE, ElasticGeometryType.GEO_POINT); typeBuilder.add(geoPointAtt); AttributeDescriptor geoShapeAtt; AttributeTypeBuilder geoShapeAttBuilder = new AttributeTypeBuilder(); geoShapeAttBuilder.setName("geom"); geoShapeAttBuilder.setBinding(Geometry.class); geoShapeAtt = geoShapeAttBuilder.buildDescriptor("geom", geoShapeAttBuilder.buildType()); geoShapeAtt.getUserData().put(GEOMETRY_TYPE, ElasticGeometryType.GEO_SHAPE); typeBuilder.add(geoShapeAtt); AttributeDescriptor analyzedAtt; AttributeTypeBuilder analyzedAttBuilder = new AttributeTypeBuilder(); analyzedAttBuilder.setName("analyzed"); analyzedAttBuilder.setBinding(String.class); analyzedAtt = analyzedAttBuilder.buildDescriptor("analyzed", analyzedAttBuilder.buildType()); analyzedAtt.getUserData().put(ANALYZED, true); typeBuilder.add(analyzedAtt); AttributeDescriptor netsedAtt; AttributeTypeBuilder nestedAttBuilder = new AttributeTypeBuilder(); nestedAttBuilder.setName("nested.hej"); nestedAttBuilder.setBinding(String.class); netsedAtt = nestedAttBuilder.buildDescriptor("nested.hej", nestedAttBuilder.buildType()); netsedAtt.getUserData().put(NESTED, true); netsedAtt.getUserData().put(ANALYZED, true); typeBuilder.add(netsedAtt); AttributeDescriptor netsedDateAtt; AttributeTypeBuilder nestedDateAttBuilder = new AttributeTypeBuilder(); nestedDateAttBuilder.setName("nested.datehej"); nestedDateAttBuilder.setBinding(Date.class); netsedDateAtt = nestedDateAttBuilder.buildDescriptor("nested.datehej", nestedDateAttBuilder.buildType()); netsedDateAtt.getUserData().put(NESTED, true); typeBuilder.add(netsedDateAtt); featureType = typeBuilder.buildFeatureType(); setFilterBuilder(); parameters = new HashMap<>(); final Hints hints = new Hints(); hints.put(Hints.VIRTUAL_TABLE_PARAMETERS, 
parameters); query = new Query(); query.setHints(hints); dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); gf = new GeometryFactory(); } private void setFilterBuilder() { builder = new FilterToElastic(); builder.setFeatureType(featureType); } private void addDateWithFormatToFeatureType(String format) { SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder(); typeBuilder.init(featureType); AttributeDescriptor dateAtt; AttributeTypeBuilder dateAttBuilder = new AttributeTypeBuilder(); dateAttBuilder.setName("dateAttrWithFormat"); dateAttBuilder.setBinding(Date.class); dateAtt = dateAttBuilder.buildDescriptor("dateAttrWithFormat", dateAttBuilder.buildType()); dateAtt.getUserData().put(DATE_FORMAT, format); typeBuilder.add(dateAtt); featureType = typeBuilder.buildFeatureType(); setFilterBuilder(); } @Test public void testEncodeQuery() { Query query = new Query(); query.setFilter(Filter.INCLUDE); builder.encode(query); assertEquals(MATCH_ALL, builder.getQueryBuilder()); assertEquals(MATCH_ALL, builder.getNativeQueryBuilder()); assertNull(builder.getAggregations()); assertTrue(builder.getFullySupported()); } @Test public void testId() { final Id filter = ff.id(ff.featureId("id")); Map expected = ImmutableMap.of("ids", ImmutableMap.of("values", ImmutableList.of("id"))); builder.visit(filter, null); assertTrue(builder.createCapabilities().fullySupports(filter)); assertEquals(expected, builder.getQueryBuilder()); } @Test public void testAnd() { And filter = ff.and(ff.id(ff.featureId("id1")), ff.id(ff.featureId("id2"))); Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", ImmutableList.of(ImmutableMap.of("ids", ImmutableMap.of("values", ImmutableList.of("id1"))), ImmutableMap.of("ids", ImmutableMap.of("values", ImmutableList.of("id2")))))); builder.visit(filter, null); assertTrue(builder.createCapabilities().fullySupports(filter)); assertEquals(expected, builder.getQueryBuilder()); } 
/** A disjunction becomes a bool query with a "should" clause list. */
    @Test
    public void testOr() {
        final Or filter = ff.or(ff.id(ff.featureId("id1")), ff.id(ff.featureId("id2")));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("should", ImmutableList.of(
                ImmutableMap.of("ids", ImmutableMap.of("values", ImmutableList.of("id1"))),
                ImmutableMap.of("ids", ImmutableMap.of("values", ImmutableList.of("id2"))))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Negation wraps the inner query in bool/must_not. */
    @Test
    public void testNot() {
        Not filter = ff.not(ff.id(ff.featureId("id")));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must_not",
                ImmutableMap.of("ids", ImmutableMap.of("values", ImmutableList.of("id")))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** IS NULL becomes a negated "exists" query. */
    @Test
    public void testPropertyIsNull() {
        PropertyIsNull filter = ff.isNull(ff.property("prop"));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must_not",
                ImmutableMap.of("exists", ImmutableMap.of("field", "prop"))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** NOT(IS NULL) collapses to a bare "exists" query. */
    @Test
    public void testPropertyIsNotNull() {
        Not filter = ff.not(ff.isNull(ff.property("prop")));
        Map expected = ImmutableMap.of("exists", ImmutableMap.of("field", "prop"));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsEqualToString() {
        PropertyIsEqualTo filter = ff.equals(ff.property("stringAttr"), ff.literal("value"));
        Map expected = ImmutableMap.of("term", ImmutableMap.of("stringAttr", "value"));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Equality on a NESTED attribute is wrapped in a "nested" query with the parent path. */
    @Test
    public void testNestedPropertyIsEqualToString() {
        PropertyIsEqualTo filter = ff.equals(ff.property("nested.hej"), ff.literal("value"));
        Map expected = ImmutableMap.of("nested", ImmutableMap.of("path", "nested",
                "query", ImmutableMap.of("term", ImmutableMap.of("nested.hej", "value"))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Same as above with literal and property operands swapped. */
    @Test
    public void testNestedStringIsEqualToProperty() {
        PropertyIsEqualTo filter = ff.equals(ff.literal("value"), ff.property("nested.hej"));
        Map expected = ImmutableMap.of("nested", ImmutableMap.of("path", "nested",
                "query", ImmutableMap.of("term", ImmutableMap.of("nested.hej", "value"))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsNotEqualToString() {
        PropertyIsNotEqualTo filter = ff.notEqual(ff.property("stringAttr"), ff.literal("value"));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must_not",
                ImmutableMap.of("term", ImmutableMap.of("stringAttr", "value"))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** String literal is coerced to the attribute's numeric binding (Double). */
    @Test
    public void testPropertyIsEqualToDouble() {
        PropertyIsEqualTo filter = ff.equals(ff.property("doubleAttr"), ff.literal("4.5"));
        Map expected = ImmutableMap.of("term", ImmutableMap.of("doubleAttr", 4.5));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testDoubleIsEqualtoProperty() {
        PropertyIsEqualTo filter = ff.equals(ff.literal("4.5"), ff.property("doubleAttr"));
        Map expected = ImmutableMap.of("term", ImmutableMap.of("doubleAttr", 4.5));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsNotEqualToDouble() {
        PropertyIsNotEqualTo filter = ff.notEqual(ff.property("doubleAttr"), ff.literal("4.5"));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must_not",
                ImmutableMap.of("term", ImmutableMap.of("doubleAttr", 4.5))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsEqualToFloat() {
        PropertyIsEqualTo filter = ff.equals(ff.property("floatAttr"), ff.literal("4.5"));
        Map expected = ImmutableMap.of("term", ImmutableMap.of("floatAttr", 4.5f));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsEqualToInteger() {
        PropertyIsEqualTo filter = ff.equals(ff.property("integerAttr"), ff.literal("4"));
        Map expected = ImmutableMap.of("term", ImmutableMap.of("integerAttr", 4));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsEqualToBoolean() {
        PropertyIsEqualTo filter = ff.equals(ff.property("booleanAttr"), ff.literal("true"));
        Map expected = ImmutableMap.of("term", ImmutableMap.of("booleanAttr", true));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsGreaterThan() {
        PropertyIsGreaterThan filter = ff.greater(ff.property("doubleAttr"), ff.literal("4.5"));
        Map expected = ImmutableMap.of("range",
                ImmutableMap.of("doubleAttr", ImmutableMap.of("gt", 4.5)));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsLessThan() {
        PropertyIsLessThan filter = ff.less(ff.property("doubleAttr"), ff.literal("4.5"));
        Map expected = ImmutableMap.of("range",
                ImmutableMap.of("doubleAttr", ImmutableMap.of("lt", 4.5)));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsGreaterThanOrEqualTo() {
        PropertyIsGreaterThanOrEqualTo filter =
                ff.greaterOrEqual(ff.property("doubleAttr"), ff.literal("4.5"));
        Map expected = ImmutableMap.of("range",
                ImmutableMap.of("doubleAttr", ImmutableMap.of("gte", 4.5)));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testPropertyIsLessThanOrEqualTo() {
        PropertyIsLessThanOrEqualTo filter =
                ff.lessOrEqual(ff.property("doubleAttr"), ff.literal("4.5"));
        Map expected = ImmutableMap.of("range",
                ImmutableMap.of("doubleAttr", ImmutableMap.of("lte", 4.5)));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** BETWEEN becomes a single range query with both gte and lte bounds. */
    @Test
    public void testPropertyIsBetween() {
        PropertyIsBetween filter =
                ff.between(ff.property("doubleAttr"), ff.literal("4.5"), ff.literal("5.5"));
        Map expected = ImmutableMap.of("range",
                ImmutableMap.of("doubleAttr", ImmutableMap.of("gte", 4.5, "lte", 5.5)));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Unknown attributes keep the literal bounds as strings (no type coercion possible). */
    @Test
    public void testUnknownPropertyIsBetween() {
        PropertyIsBetween filter =
                ff.between(ff.property("unknownStr"), ff.literal("a"), ff.literal("c"));
        Map expected = ImmutableMap.of("range",
                ImmutableMap.of("unknownStr", ImmutableMap.of("gte", "a", "lte", "c")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testIncludeFilter() {
        IncludeFilter filter = Filter.INCLUDE;
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(MATCH_ALL, builder.getQueryBuilder());
    }

    /** EXCLUDE is a negated match_all. */
    @Test
    public void testExcludeFilter() {
        ExcludeFilter filter = Filter.EXCLUDE;
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must_not", MATCH_ALL));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testNullFilter() {
        assertNull(builder.visitNullFilter(null));
    }

    /** Visiting NIL clears the builder's current field. */
    @Test
    public void testNilFilter() {
        builder.field = "field";
        builder.visit((NilExpression) NilExpression.NIL, null);
        assertNull(builder.field);
    }

    /** A null spatial filter is a programming error and surfaces as an NPE. */
    @Test
    public void testNullBinarySpatialOperatorFilter() {
        boolean success = false;
        try {
            builder.visit((BBOX) null, null);
        } catch (NullPointerException e) {
            success = true;
        }
        assertTrue(success);
    }

    /** LIKE on an analyzed field becomes a query_string query. */
    @Test
    public void testPropertyIsLike() {
        PropertyIsLike filter = ff.like(ff.property("analyzed"), "hello");
        Map expected = ImmutableMap.of("query_string",
                ImmutableMap.of("query", "hello", "default_field", "analyzed"));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Case sensitivity flag does not change the generated query. */
    @Test
    public void testCaseSensitivePropertyIsLike() {
        PropertyIsLike filter = ff.like(ff.property("analyzed"), "hello", "\\", "*", ".", true);
        Map expected = ImmutableMap.of("query_string",
                ImmutableMap.of("query", "hello", "default_field", "analyzed"));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** LIKE on a nested attribute is wrapped in a "nested" query. */
    @Test
    public void testNestedPropertyIsLike() {
        PropertyIsLike filter = ff.like(ff.property("nested.hej"), "hello");
        Map expectedFilter = ImmutableMap.of("query_string",
                ImmutableMap.of("query", "hello", "default_field", "nested.hej"));
        Map expected = ImmutableMap.of("nested",
                ImmutableMap.of("path", "nested", "query", expectedFilter));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Translation of (escape, multi, single) wildcard patterns to Lucene regex syntax. */
    @Test
    public void testConvertToRegex() {
        assertEquals("BroadWay.*", FilterToElastic.convertToRegex('!', '*', '.', "BroadWay*"));
        assertEquals("broad#ay", FilterToElastic.convertToRegex('!', '*', '.', "broad#ay"));
        assertEquals("broadway", FilterToElastic.convertToRegex('!', '*', '.', "broadway"));
        assertEquals("broad.ay", FilterToElastic.convertToRegex('!', '*', '.', "broad.ay"));
        assertEquals("broad\\.ay", FilterToElastic.convertToRegex('!', '*', '.', "broad!.ay"));
        assertEquals("broa'dway", FilterToElastic.convertToRegex('!', '*', '.', "broa'dway"));
        assertEquals("broa''dway", FilterToElastic.convertToRegex('!', '*', '.', "broa''dway"));
        assertEquals("broadway.", FilterToElastic.convertToRegex('!', '*', '.', "broadway."));
        assertEquals("broadway", FilterToElastic.convertToRegex('!', '*', '.', "broadway!"));
        assertEquals("broadway\\!", FilterToElastic.convertToRegex('!', '*', '.', "broadway!!"));
        assertEquals("broadway\\\\", FilterToElastic.convertToRegex('\\', '*', '.', "broadway\\\\"));
        assertEquals("broadway\\", FilterToElastic.convertToRegex('!', '*', '.', "broadway\\"));
    }

    /** Translation of wildcard patterns to query_string syntax ('.' single-char -> '?'). */
    @Test
    public void testConvertToQueryString() {
        assertEquals("BroadWay*", FilterToElastic.convertToQueryString('!', '*', '.', "BroadWay*"));
        assertEquals("broad#ay", FilterToElastic.convertToQueryString('!', '*', '.', "broad#ay"));
        assertEquals("broadway", FilterToElastic.convertToQueryString('!', '*', '.', "broadway"));
        assertEquals("broad?ay", FilterToElastic.convertToQueryString('!', '*', '.', "broad.ay"));
        assertEquals("broad\\.ay", FilterToElastic.convertToQueryString('!', '*', '.', "broad!.ay"));
        assertEquals("broa'dway", FilterToElastic.convertToQueryString('!', '*', '.', "broa'dway"));
        assertEquals("broa''dway", FilterToElastic.convertToQueryString('!', '*', '.', "broa''dway"));
        assertEquals("broadway?", FilterToElastic.convertToQueryString('!', '*', '.', "broadway."));
        assertEquals("broadway", FilterToElastic.convertToQueryString('!', '*', '.', "broadway!"));
        assertEquals("broadway\\!", FilterToElastic.convertToQueryString('!', '*', '.', "broadway!!"));
        assertEquals("broadway\\\\", FilterToElastic.convertToQueryString('\\', '*', '.', "broadway\\\\"));
        assertEquals("broadway\\", FilterToElastic.convertToQueryString('!', '*', '.', "broadway\\"));
    }

    /** BBOX over a geo_shape field becomes a geo_shape polygon intersection. */
    @Test
    public void testGeoShapeBboxFilter() {
        BBOX filter = ff.bbox("geom", 0., 0., 1.1, 1.1, "EPSG:4326");
        List<List<Double>> coords = new ArrayList<>();
        coords.add(ImmutableList.of(0., 0.));
        coords.add(ImmutableList.of(0., 1.1));
        coords.add(ImmutableList.of(1.1, 1.1));
        coords.add(ImmutableList.of(1.1, 0.));
        coords.add(ImmutableList.of(0., 0.));
        // vertices in reverse order
        final List<List<Double>> reverseCoords = ImmutableList.of(
                coords.get(0), coords.get(3), coords.get(2), coords.get(1), coords.get(4));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                "filter", ImmutableMap.of("geo_shape", ImmutableMap.of("geom",
                        ImmutableMap.of("shape", ImmutableMap.of(
                                "coordinates", ImmutableList.of(coords), "type", "Polygon"),
                                "relation", "INTERSECTS")))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        // either winding order of the bbox polygon is acceptable
        assertThat(builder.getQueryBuilder().toString(),
                isOneOf(expected.toString(),
                        expected.toString().replace(coords.toString(), reverseCoords.toString())));
    }

    @Test
    public void testGeoShapeIntersectsFilter() throws CQLException {
        Intersects filter = (Intersects) ECQL.toFilter("INTERSECTS(\"geom\", LINESTRING(0 0,1.1 1.1))");
        List<List<Double>> coords = new ArrayList<>();
        coords.add(ImmutableList.of(0., 0.));
        coords.add(ImmutableList.of(1.1, 1.1));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                "filter", ImmutableMap.of("geo_shape", ImmutableMap.of("geom",
                        ImmutableMap.of("shape", ImmutableMap.of(
                                "coordinates", coords, "type", "LineString"),
                                "relation", "INTERSECTS")))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        // TODO: Why doesn't equality check on objects work here
        assertEquals(expected.toString(), builder.getQueryBuilder().toString());
    }

    /** An empty geometry can match nothing: negated match_all. */
    @Test
    public void testEmptyGeoShape() {
        LineString ls = gf.createLineString(new Coordinate[0]);
        Intersects filter = ff.intersects(ff.property("geom"), ff.literal(ls));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must_not", MATCH_ALL));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Everything is disjoint from an empty geometry: match_all. */
    @Test
    public void testEmptyDisjointGeoShape() {
        LineString ls = gf.createLineString(new Coordinate[0]);
        Disjoint filter = ff.disjoint(ff.property("geom"), ff.literal(ls));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(MATCH_ALL, builder.getQueryBuilder());
    }

    /** Operand order (geometry first, property second) does not change the result. */
    @Test
    public void testGeoShapeIntersectsFilterReversed() throws CQLException {
        Intersects filter = (Intersects) ECQL.toFilter("INTERSECTS(LINESTRING(0 0,1.1 1.1), \"geom\")");
        List<List<Double>> coords = new ArrayList<>();
        coords.add(ImmutableList.of(0., 0.));
        coords.add(ImmutableList.of(1.1, 1.1));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                "filter", ImmutableMap.of("geo_shape", ImmutableMap.of("geom",
                        ImmutableMap.of("shape", ImmutableMap.of(
                                "coordinates", coords, "type", "LineString"),
                                "relation", "INTERSECTS")))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected.toString(), builder.getQueryBuilder().toString());
    }

    /** A bbox nested inside a conjunction keeps its bool/filter wrapping. */
    @Test
    public void testAndWithBbox() {
        And filter = ff.and(ff.id(ff.featureId("id1")),
                ff.bbox("geom", 0., 0., 1.1, 1.1, "EPSG:4326"));
        List<List<Double>> coords = new ArrayList<>();
        coords.add(ImmutableList.of(0., 0.));
        coords.add(ImmutableList.of(0., 1.1));
        coords.add(ImmutableList.of(1.1, 1.1));
        coords.add(ImmutableList.of(1.1, 0.));
        coords.add(ImmutableList.of(0., 0.));
        // vertices in reverse order
        List<List<Double>> reverseCoords = ImmutableList.of(
                coords.get(0), coords.get(3), coords.get(2), coords.get(1), coords.get(4));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", ImmutableList.of(
                ImmutableMap.of("ids", ImmutableMap.of("values", ImmutableList.of("id1"))),
                ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                        "filter", ImmutableMap.of("geo_shape", ImmutableMap.of("geom",
                                ImmutableMap.of("shape", ImmutableMap.of(
                                        "coordinates", ImmutableList.of(coords), "type", "Polygon"),
                                        "relation", "INTERSECTS"))))))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertThat(builder.getQueryBuilder().toString(),
                isOneOf(expected.toString(),
                        expected.toString().replace(coords.toString(), reverseCoords.toString())));
    }

    /** BBOX over a geo_point field uses geo_bounding_box rather than geo_shape. */
    @Test
    public void testGeoPointBboxFilter() {
        BBOX filter = ff.bbox("geo_point", 0., 0., 1., 1., "EPSG:4326");
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                "filter", ImmutableMap.of("geo_bounding_box", ImmutableMap.of("geo_point",
                        ImmutableMap.of("top_left", ImmutableList.of(0., 1.),
                                "bottom_right", ImmutableList.of(1., 0.))))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Polygon intersection over a geo_point field uses geo_polygon. */
    @Test
    public void testGeoPolygonFilter() throws CQLException {
        Intersects filter = (Intersects) ECQL.toFilter(
                "INTERSECTS(\"geo_point\", POLYGON((0 0, 0 1.1, 1.1 1.1, 1.1 0, 0 0)))");
        List<List<Double>> points = ImmutableList.of(
                ImmutableList.of(0., 0.),
                ImmutableList.of(0., 1.1),
                ImmutableList.of(1.1, 1.1),
                ImmutableList.of(1.1, 0.),
                ImmutableList.of(0., 0.));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                "filter", ImmutableMap.of("geo_polygon", ImmutableMap.of("geo_point",
                        ImmutableMap.of("points", points)))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** DWITHIN becomes a geo_distance query with a unit-suffixed distance. */
    @Test
    public void testDWithinFilter() throws CQLException {
        DWithin filter = (DWithin) ECQL.toFilter("DWITHIN(\"geo_point\", POINT(0 1.1), 1.0, meters)");
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                "filter", ImmutableMap.of("geo_distance", ImmutableMap.of("distance", "1.0m",
                        "geo_point", ImmutableList.of(0., 1.1)))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** DWITHIN against a polygon measures from the polygon centroid. */
    @Test
    public void testDWithinPolygonFilter() throws CQLException {
        DWithin filter = (DWithin) ECQL.toFilter(
                "DWITHIN(\"geo_point\", POLYGON((0 0, 0 1, 1 1, 1 0, 0 0)), 1.0, meters)");
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                "filter", ImmutableMap.of("geo_distance", ImmutableMap.of("distance", "1.0m",
                        "geo_point", ImmutableList.of(0.5, 0.5)))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** BEYOND is the negation of the equivalent DWITHIN query. */
    @Test
    public void testDBeyondFilter() throws CQLException {
        Beyond filter = (Beyond) ECQL.toFilter("BEYOND(\"geo_point\", POINT(0 1.1), 1.0, meters)");
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must_not",
                ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                        "filter", ImmutableMap.of("geo_distance", ImmutableMap.of("distance", "1.0m",
                                "geo_point", ImmutableList.of(0., 1.1)))))));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** A LinearRing literal is converted to a LineString geometry for encoding. */
    @Test
    public void testLinearRingVisit() {
        final Geometry ring = gf.createLinearRing(new Coordinate[] {
                new Coordinate(0, 0), new Coordinate(1, 1),
                new Coordinate(1, 0), new Coordinate(0, 0) });
        builder.visit(ff.literal(ring), null);
        assertEquals("LineString", builder.currentGeometry.getGeometryType());
    }

    /** Mixed attribute and spatial predicates compose under bool/must. */
    @Test
    public void testCompoundFilter() throws CQLException {
        Filter filter = ECQL.toFilter(
                "time > \"1970-01-01\" and INTERSECTS(\"geom\", LINESTRING(0 0,1.1 1.1))");
        List<List<Double>> coords = new ArrayList<>();
        coords.add(ImmutableList.of(0., 0.));
        coords.add(ImmutableList.of(1.1, 1.1));
        Map expected = ImmutableMap.of("bool", ImmutableMap.of("must", ImmutableList.of(
                ImmutableMap.of("range", ImmutableMap.of("time", ImmutableMap.of("gt", "1970-01-01"))),
                ImmutableMap.of("bool", ImmutableMap.of("must", MATCH_ALL,
                        "filter", ImmutableMap.of("geo_shape", ImmutableMap.of("geom",
                                ImmutableMap.of("shape", ImmutableMap.of(
                                        "coordinates", coords, "type", "LineString"),
                                        "relation", "INTERSECTS"))))))));
        builder.encode(filter);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected.toString(), builder.getQueryBuilder().toString());
    }

    /** Dotted property names in CQL are passed through unchanged. */
    @Test
    public void testCql() throws CQLException {
        Filter filter = ECQL.toFilter("\"object.field\"='value'");
        Map expected = ImmutableMap.of("term", ImmutableMap.of("object.field", "value"));
        builder.encode(filter);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Null hints leave both query builders at match_all. */
    @Test
    public void testViewParamWithNullHints() {
        query.setHints(null);
        builder.addViewParams(query);
        assertEquals(MATCH_ALL, builder.getQueryBuilder());
        assertEquals(MATCH_ALL, builder.nativeQueryBuilder);
    }

    /** The "q" view parameter supplies a native query as JSON. */
    @Test
    public void testQueryViewParam() throws JsonProcessingException {
        Map idsQuery = ImmutableMap.of("ids", ImmutableMap.of("value", ImmutableList.of("type1")));
        parameters.put("q", new ObjectMapper().writeValueAsString(idsQuery));
        builder.addViewParams(query);
        assertEquals(idsQuery, builder.nativeQueryBuilder);
    }

    /** The "a" view parameter supplies aggregations as JSON. */
    @Test
    public void testAggregationViewParam() {
        final String aggregation =
                "{\"ageohash_grid_agg\":{\"geohash_grid\": {\"field\":\"a_field\",\"precision\":1}}}";
        parameters.put("a", aggregation);
        builder.addViewParams(query);
        assertEquals(ImmutableMap.of("ageohash_grid_agg", ImmutableMap.of("geohash_grid",
                ImmutableMap.of("field", "a_field", "precision", 1))), builder.aggregations);
    }

    /** URL-encoded aggregation JSON is decoded before parsing. */
    @Test
    public void testUrlEncodedAggregationViewParam() {
        final String aggregation =
                "%7B%22ageohash_grid_agg%22%3A%20%7B%22geohash_grid%22%3A%20%7B%22field%22%3A%20%22a_field%22%2C%20%22precision%22%3A%201%7D%7D%7D";
        parameters.put("a", aggregation);
        builder.addViewParams(query);
        assertEquals(ImmutableMap.of("ageohash_grid_agg", ImmutableMap.of("geohash_grid",
                ImmutableMap.of("field", "a_field", "precision", 1))), builder.aggregations);
    }

    /** A truncated URL-encoded aggregation raises FilterToElasticException. */
    @Test(expected = FilterToElasticException.class)
    public void testUrlEncodedAggregationViewParamWithParseError() {
        final String aggregation =
                "%7B%22ageohash_grid_agg%22%3A%20%7B%22geohash_grid%22%3A%20%7B%22field%22%3A%20%22a_field%22%2C%20%22precision%22%3A%201%7D%7D%";
        parameters.put("a", aggregation);
        builder.addViewParams(query);
    }

    /** A native "q" combines with an existing query builder. */
    @Test
    public void testAndQueryViewParam() throws JsonProcessingException {
        Map idsQuery = ImmutableMap.of("ids", ImmutableMap.of("value", ImmutableList.of("id")));
        builder.queryBuilder = idsQuery;
        parameters.put("q", new ObjectMapper().writeValueAsString(idsQuery));
        builder.addViewParams(query);
        assertNotNull(builder.getQueryBuilder());
    }

    /** With native-only=true the native query replaces the built query entirely. */
    @Test
    public void testNativeOnlyQueryViewParam() throws JsonProcessingException {
        parameters.put("native-only", "true");
        Map idsQuery = ImmutableMap.of("ids", ImmutableMap.of("value", ImmutableList.of("id")));
        builder.queryBuilder = idsQuery;
        parameters.put("q", new ObjectMapper().writeValueAsString(idsQuery));
        builder.addViewParams(query);
        assertEquals(builder.getQueryBuilder(), idsQuery);
    }

    /** Malformed native query JSON raises FilterToElasticException. */
    @Test(expected = FilterToElasticException.class)
    public void testNativeQueryViewParamWithError() {
        parameters.put("native-only", "true");
        builder.queryBuilder = ImmutableMap.of("ids", ImmutableMap.of("value", ImmutableList.of("id")));
        parameters.put("q", "{\"x}");
        builder.addViewParams(query);
    }

    /** String temporal literals are passed through verbatim. */
    @Test
    public void testTemporalStringLiteral() {
        After filter = ff.after(ff.property("dateAttr"), ff.literal("1970-01-01T00:00:00.000Z"));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("gt", "1970-01-01T00:00:00.000Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Temporal comparison on a nested date attribute is wrapped in a nested query. */
    @Test
    public void testNestedTemporalStringLiteral() {
        After filter = ff.after(ff.property("nested.datehej"), ff.literal("1970-01-01T00:00:00.000Z"));
        Map expectedFilter = ImmutableMap.of("range", ImmutableMap.of("nested.datehej",
                ImmutableMap.of("gt", "1970-01-01T00:00:00.000Z")));
        Map expected = ImmutableMap.of("nested",
                ImmutableMap.of("path", "nested", "query", expectedFilter));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Instant literals are formatted with the default date_time pattern when none is configured. */
    @Test
    public void testTemporalInstantLiteralDefaultFormat() throws ParseException {
        dateFormat = new SimpleDateFormat("yyyy-MM-dd");
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        Date date1 = dateFormat.parse("1970-07-19");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        After filter = ff.after(ff.property("dateAttr"), ff.literal(temporalInstant));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("gt", "1970-07-19T00:00:00.000Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** An explicit attribute date format controls the encoded literal. */
    @Test
    public void testTemporalInstanceLiteralExplicitFormat() throws ParseException {
        addDateWithFormatToFeatureType("yyyy-MM-dd");
        dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456-0100");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        After filter = ff.after(ff.property("dateAttrWithFormat"), ff.literal(temporalInstant));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttrWithFormat",
                ImmutableMap.of("gt", "1970-07-19")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** Elasticsearch named formats (basic_date_time) are honored. */
    @Test
    public void testTemporalInstanceLiteralBasicDateTimeFormat() throws ParseException {
        addDateWithFormatToFeatureType("basic_date_time");
        dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456-0100");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        After filter = ff.after(ff.property("dateAttrWithFormat"), ff.literal(temporalInstant));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttrWithFormat",
                ImmutableMap.of("gt", "19700719T020203.456Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** AFTER an instant -> range gt, normalized to UTC. */
    @Test
    public void testAfterFilter() throws ParseException {
        dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456-0100");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        After filter = ff.after(ff.property("dateAttr"), ff.literal(temporalInstant));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("gt", "1970-07-19T02:02:03.456Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** literal AFTER property inverts the comparison to range lt. */
    @Test
    public void testAfterFilterSwapped() throws ParseException {
        dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456-0100");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        After filter = ff.after(ff.literal(temporalInstant), ff.property("dateAttr"));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("lt", "1970-07-19T02:02:03.456Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** AFTER a period compares against the period's end. */
    @Test
    public void testAfterFilterPeriod() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        After filter = ff.after(ff.property("dateAttr"), ff.literal(period));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("gt", "1970-07-19T07:08:09.101Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** period AFTER property compares against the period's start with lt. */
    @Test
    public void testAfterFilterPeriodSwapped() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        After filter = ff.after(ff.literal(period), ff.property("dateAttr"));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("lt", "1970-07-19T01:02:03.456Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void testBeforeFilter() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        org.opengis.filter.temporal.Before filter =
                ff.before(ff.property("dateAttr"), ff.literal(temporalInstant));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("lt", "1970-07-19T01:02:03.456Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** BEFORE a period compares against the period's start. */
    @Test
    public void testBeforeFilterPeriod() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        org.opengis.filter.temporal.Before filter =
                ff.before(ff.property("dateAttr"), ff.literal(period));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("lt", "1970-07-19T01:02:03.456Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** period BEFORE property compares against the period's end with gt. */
    @Test
    public void testBeforeFilterPeriodSwapped() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        org.opengis.filter.temporal.Before filter =
                ff.before(ff.literal(period), ff.property("dateAttr"));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("gt", "1970-07-19T07:08:09.101Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** BEGINS matches the exact period start as a term query. */
    @Test
    public void testBegins() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        Begins filter = ff.begins(ff.property("dateAttr"), ff.literal(period));
        Map expected = ImmutableMap.of("term",
                ImmutableMap.of("dateAttr", "1970-07-19T01:02:03.456Z"));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** BEGINS requires a period operand; an instant literal is rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void testBeginsWithMissingPeriod() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Begins filter = ff.begins(ff.property("dateAttr"), ff.literal(date1));
        builder.visit(filter, null);
    }

    /** BEGINS does not support the swapped (period-first) operand order. */
    @Test(expected = IllegalArgumentException.class)
    public void testBeginsWithSwap() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        Begins filter = ff.begins(ff.literal(period), ff.property("dateAttr"));
        builder.visit(filter, null);
    }

    /** BEGUNBY (period first) matches the period start as a term query. */
    @Test
    public void testBegunBy() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        BegunBy filter = ff.begunBy(ff.literal(period), ff.property("dateAttr"));
        Map expected = ImmutableMap.of("term",
                ImmutableMap.of("dateAttr", "1970-07-19T01:02:03.456Z"));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** DURING becomes an exclusive range over the period bounds. */
    @Test
    public void testDuring() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        During filter = ff.during(ff.property("dateAttr"), ff.literal(period));
        Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr",
                ImmutableMap.of("gt", "1970-07-19T01:02:03.456Z",
                        "lt", "1970-07-19T07:08:09.101Z")));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** ENDS matches the exact period end as a term query. */
    @Test
    public void testEnds() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        Ends filter = ff.ends(ff.property("dateAttr"), ff.literal(period));
        Map expected = ImmutableMap.of("term",
                ImmutableMap.of("dateAttr", "1970-07-19T07:08:09.101Z"));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** ENDEDBY (period first) matches the period end as a term query. */
    @Test
    public void testEndedBy() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        EndedBy filter = ff.endedBy(ff.literal(period), ff.property("dateAttr"));
        Map expected = ImmutableMap.of("term",
                ImmutableMap.of("dateAttr", "1970-07-19T07:08:09.101Z"));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    /** ENDEDBY does not support the property-first operand order. */
    @Test(expected = IllegalArgumentException.class)
    public void testEndedByWithoutSwap() throws ParseException {
        Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z");
        Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1));
        Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z");
        Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2));
        Period period = new DefaultPeriod(temporalInstant, temporalInstant2);
        EndedBy filter = ff.endedBy(ff.property("dateAttr"), ff.literal(period));
        Map expected = ImmutableMap.of("term",
                ImmutableMap.of("dateAttr", "1970-07-19T07:08:09.101Z"));
        builder.visit(filter, null);
        assertTrue(builder.createCapabilities().fullySupports(filter));
        assertEquals(expected, builder.getQueryBuilder());
    }

    @Test
    public void 
testTContains() throws ParseException { Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z"); Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1)); Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z"); Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2)); Period period = new DefaultPeriod(temporalInstant, temporalInstant2); TContains filter = ff.tcontains(ff.literal(period), ff.property("dateAttr")); Map expected = ImmutableMap.of("range", ImmutableMap.of("dateAttr", ImmutableMap.of("gt", "1970-07-19T01:02:03.456Z", "lt", "1970-07-19T07:08:09.101Z"))); builder.visit(filter, null); assertTrue(builder.createCapabilities().fullySupports(filter)); assertEquals(expected, builder.getQueryBuilder()); } @Test public void testTEqualsFilter() throws ParseException { Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z"); Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1)); TEquals filter = ff.tequals(ff.property("dateAttr"), ff.literal(temporalInstant)); Map expected = ImmutableMap.of("term", ImmutableMap.of("dateAttr", "1970-07-19T01:02:03.456Z")); builder.visit(filter, null); assertTrue(builder.createCapabilities().fullySupports(filter)); assertEquals(expected, builder.getQueryBuilder()); } @Test(expected=IllegalArgumentException.class) public void testTEqualsWithPeriod() throws ParseException { Date date1 = dateFormat.parse("1970-07-19T01:02:03.456Z"); Instant temporalInstant = new DefaultInstant(new DefaultPosition(date1)); Date date2 = dateFormat.parse("1970-07-19T07:08:09.101Z"); Instant temporalInstant2 = new DefaultInstant(new DefaultPosition(date2)); Period period = new DefaultPeriod(temporalInstant, temporalInstant2); TEquals filter = ff.tequals(ff.property("dateAttr"), ff.literal(period)); builder.visit(filter, null); } @Test public void testPropertyNameWithExtraData() { builder.visit(ff.property("doubleAttr"), Double.class); assertEquals("doubleAttr", builder.field); } 
@Test(expected=UnsupportedOperationException.class) public void testUnsupportedBinaryExpression() { builder.visit(ff.subtract(ff.property("doubleAttr"), ff.literal(2.5)), null); } @Test(expected=UnsupportedOperationException.class) public void testUnsupportedPropertyIsNill() { builder.visit(ff.isNil(ff.property("stringAttr"), ff.literal(2.5)), null); } @Test(expected=UnsupportedOperationException.class) public void testUnsupportedBinaryComparisonOperatorWithBinaryExpression() { builder.visit(ff.equals(ff.subtract(ff.property("doubleAttr"), ff.literal(2.5)),ff.literal(0.0)), null); } @Test(expected=UnsupportedOperationException.class) public void testUnsupportedBinaryTemporalOperator() { builder.visitBinaryTemporalOperator(); } @Test(expected=UnsupportedOperationException.class) public void testUnsupportedAdd() { builder.visit(ff.add(ff.property("p1"), ff.property("p2")), null); } @Test(expected=UnsupportedOperationException.class) public void testUnsupportedSubtract() { builder.visit(ff.subtract(ff.property("p1"), ff.property("p2")), null); } @Test(expected=UnsupportedOperationException.class) public void testUnsupportedMult() { builder.visit(ff.multiply(ff.property("p1"), ff.property("p2")), null); } @Test(expected=UnsupportedOperationException.class) public void testUnsupportedDivide() { builder.visit(ff.divide(ff.property("p1"), ff.property("p2")), null); } @Test(expected=UnsupportedOperationException.class) public void testUnsupportedFunction() { builder.visit(ff.function("sqrt", ff.property("doubleAttr")), null); } @Test(expected=UnsupportedOperationException.class) public void testUnsupportedLiteralTimePeriod() { builder.visitLiteralTimePeriod(); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticGeometryFilterIT.java ================================================ /* * GeoTools - The Open Source Java GIS Toolkit * http://geotools.org * * (C) 2014, Open Source Geospatial Foundation 
(OSGeo) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. */ package mil.nga.giat.data.elasticsearch; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureIterator; import org.junit.Ignore; import org.junit.Test; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import org.locationtech.jts.geom.impl.PackedCoordinateSequenceFactory; import static org.junit.Assert.*; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.GeometryDescriptor; import org.opengis.filter.And; import org.opengis.filter.FilterFactory; import org.opengis.filter.FilterFactory2; import org.opengis.filter.PropertyIsEqualTo; import org.opengis.filter.spatial.BBOX; import org.opengis.filter.spatial.Beyond; import org.opengis.filter.spatial.Contains; import org.opengis.filter.spatial.Crosses; import org.opengis.filter.spatial.DWithin; import org.opengis.filter.spatial.Disjoint; import org.opengis.filter.spatial.Equals; import org.opengis.filter.spatial.Intersects; import org.opengis.filter.spatial.Overlaps; import org.opengis.filter.spatial.Touches; import org.opengis.filter.spatial.Within; public class ElasticGeometryFilterIT extends ElasticTestSupport { @Test public void testBBOXFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); BBOX bbox = ff.bbox("geo", -180, -98, 180, 98, "EPSG:" + SOURCE_SRID);
// Integration tests for spatial filter support against a live Elasticsearch index populated by
// ElasticTestSupport.init(...); each test builds an OGC spatial filter on a geometry field
// ("geo", "geo2", "geo3", ...) and asserts the feature count / feature IDs returned.
// NOTE(review): org.junit.Ignore is imported but no @Ignore annotation appears in this class —
// likely a leftover import; verify before removing.
SimpleFeatureCollection features = featureSource.getFeatures(bbox); assertEquals(11, features.size()); } @Test public void testWithinPolygonFilter() throws Exception { init(); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Polygon ls = gf.createPolygon(sf.create(new double[] { -180, -90, 180, -90, 180, 90, -180, 90, -180, -90 }, 2)); Within f = ff.within(ff.property("geo"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(11, features.size()); } @Test public void testBBOXAndEqualsFilter() throws Exception { init(); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo property = ff.equals(ff.property("standard_ss"), ff.literal("IEEE 802.11b")); BBOX bbox = ff.bbox("geo", -180, -180, 180, 180, "EPSG:" + SOURCE_SRID); And filter = ff.and(property, bbox); SimpleFeatureCollection features = featureSource.getFeatures(filter); assertEquals(7, features.size()); } @Test public void testCrossesFilter() throws Exception { init("not-active","geo3"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); LineString ls = gf.createLineString(sf.create(new double[] { 0, 0, 2, 2 }, 2)); Crosses f = ff.crosses(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.12"); } @Test public void testNotCrossesFilter() throws Exception { init("not-active","geo3"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); LineString ls =
gf.createLineString(sf.create(new double[] { 0, 0, 1, 1 }, 2)); Crosses f = ff.crosses(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(0, features.size()); } @Test public void testEqualFilter() throws Exception { init("not-active","geo3"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Polygon ls = gf.createPolygon(sf.create(new double[] { 3, 2, 6, 2, 6, 7, 3, 7, 3, 2 }, 2)); Equals f = ff.equal(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.13"); } @Test public void testDisjointFilter() throws Exception { init("not-active","geo3"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Point ls = gf.createPoint(sf.create(new double[] { 0, 0 }, 2)); Disjoint f = ff.disjoint(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(2, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.12"); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.13"); } @Test public void testTouchesFilter() throws Exception { init("not-active","geo3"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Point ls = gf.createPoint(sf.create(new double[] { 1, 1 }, 2)); Touches f = ff.touches(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features =
featureSource.getFeatures(f); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.12"); } @Test public void testWithinFilter() throws Exception { init("not-active","geo3"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Polygon ls = gf.createPolygon(sf.create(new double[] { 0, 0, 0, 6, 6, 6, 6, 0, 0, 0 }, 2)); Within f = ff.within(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.12"); } @Test public void testOverlapsFilter() throws Exception { init("not-active","geo3"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Polygon ls = gf.createPolygon(sf.create( new double[] { 5.5, 6, 7, 6, 7, 7, 5.5, 7, 5.5, 6 }, 2)); Overlaps f = ff.overlaps(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.13"); } @Test public void testIntersectsFilter() throws Exception { init("not-active","geo3"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Polygon ls = gf.createPolygon(sf.create(new double[] { 6, 6, 7, 6, 7, 7, 6, 7, 6, 6 }, 2)); Intersects f = ff.intersects(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f);
assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.13"); } @Test public void testContainsFilter() throws Exception { init("not-active","geo3"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Polygon ls = gf.createPolygon(sf.create(new double[] { 2, 2, 3, 2, 3, 3, 2, 3, 2, 2 }, 2)); Contains f = ff.contains(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.12"); } @Test public void testDWithinFilter() throws Exception { init(); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Point ls = gf.createPoint(sf.create(new double[] { 0, 0 }, 2)); DWithin f = ff.dwithin(ff.property("geo"), ff.literal(ls), 3, "m"); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(2, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.01"); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.10"); } @Test public void testBeyondFilter() throws Exception { init(); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Point ls = gf.createPoint(sf.create(new double[] { 0, 0 }, 2)); Beyond f = ff.beyond(ff.property("geo"), ff.literal(ls), 1, "m"); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(9, features.size()); } @Test public void
testAlternateGeometry() throws Exception { init("active", "geo2"); SimpleFeatureType schema = featureSource.getSchema(); GeometryDescriptor gd = schema.getGeometryDescriptor(); assertNotNull(gd); assertEquals("geo2", gd.getLocalName()); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); BBOX bbox = ff.bbox("geo2", 6.5, 23.5, 7.5, 24.5, "EPSG:4326"); SimpleFeatureCollection features = featureSource.getFeatures(bbox); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.09"); } @Test public void testOgrStyleGeoPoint() throws Exception { init("not-active","geo4.coordinates"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); BBOX bbox = ff.bbox("geo4.coordinates", 0, 0, 5, 5, "EPSG:4326"); assertNotNull(featureSource.getSchema().getDescriptor("geo4.coordinates")); assertNull(featureSource.getSchema().getDescriptor("geo4.type")); SimpleFeatureCollection features = featureSource.getFeatures(bbox); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); SimpleFeature feature = fsi.next(); assertEquals(feature.getID(), "active.13"); assertNotNull(feature.getDefaultGeometry()); } @Test public void testGeoPointAsArray() throws Exception { init("active","geo5"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); Point ls = gf.createPoint(sf.create(new double[] { 0, 0 }, 2)); DWithin f = ff.dwithin(ff.property("geo5"), ff.literal(ls), 3, "m"); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(2, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); SimpleFeature feature = fsi.next(); assertEquals(feature.getID(), "active.01"); assertNotNull(feature.getDefaultGeometry());
// NOTE(review): testGeoShapeAsWkt below calls init("not-active","geo6") (WKT needs ES >= 6.2,
// hence the version guard) but then filters on "geo3" and "geo5", not "geo6" — verify the field
// names are intentional and not a copy-paste slip from testCrossesFilter.
assertTrue(fsi.hasNext()); feature = fsi.next(); assertEquals(feature.getID(), "active.10"); assertNotNull(feature.getDefaultGeometry()); } @Test public void testBBOXCoveringDateline() throws Exception { init("not-active","geo"); FilterFactory ff = dataStore.getFilterFactory(); BBOX bbox = ff.bbox("geo", 178, -90, 182, 90, "EPSG:" + SOURCE_SRID); SimpleFeatureCollection features = featureSource.getFeatures(bbox); assertEquals(2, features.size()); } @Test public void testBBOXBeyondDateline() throws Exception { init("not-active","geo"); FilterFactory ff = dataStore.getFilterFactory(); BBOX bbox = ff.bbox("geo", 180.5, -90, 182, 90, "EPSG:" + SOURCE_SRID); SimpleFeatureCollection features = featureSource.getFeatures(bbox); assertEquals(1, features.size()); } @Test public void testGeoShapeAsWkt() throws Exception { if (client.getVersion() < 6.2) { // wkt unsupported prior to v6.2 return; } init("not-active","geo6"); FilterFactory2 ff = (FilterFactory2) dataStore.getFilterFactory(); GeometryFactory gf = new GeometryFactory(); PackedCoordinateSequenceFactory sf = new PackedCoordinateSequenceFactory(); LineString ls = gf.createLineString(sf.create(new double[] { 0, 0, 2, 2 }, 2)); Crosses f = ff.crosses(ff.property("geo3"), ff.literal(ls)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.12"); sf = new PackedCoordinateSequenceFactory(); ls = gf.createLineString(sf.create(new double[] { 0, 0, 1, 1 }, 2)); f = ff.crosses(ff.property("geo5"), ff.literal(ls)); features = featureSource.getFeatures(f); assertEquals(0, features.size()); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticParserUtilTest.java ================================================ /* * This file is hereby placed into the Public Domain.
This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; import static org.junit.Assert.*; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Random; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryCollection; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.MultiLineString; import org.locationtech.jts.geom.MultiPoint; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import com.github.davidmoten.geo.GeoHash; @SuppressWarnings({"rawtypes", "unchecked"}) public class ElasticParserUtilTest { private ElasticParserUtil parserUtil; private GeometryFactory geometryFactory; private Map properties; private Random rand; private RandomGeometryBuilder rgb; @Before public void setUp() { parserUtil = new ElasticParserUtil(); geometryFactory = new GeometryFactory(); properties = new LinkedHashMap<>(); rand = new Random(123456789L); rgb = new RandomGeometryBuilder(); } @Test public void testParseGeoPointPatternForNegatives() { final double lat = rand.nextDouble() * 90 - 90; final double lon = rand.nextDouble() * 180 - 180; final String value = lat + "," + lon; final Geometry geom = parserUtil.createGeometry(value); assertTrue(geom.equals(geometryFactory.createPoint(new Coordinate(lon, lat)))); } @Test public void testGeoPointPatternForFractions() { final double lat = rand.nextDouble() * 2 - 1; final double lon = rand.nextDouble() * 2 - 1; final String value = (lat + "," + lon).replace("0.", "."); final Geometry geom =
// Unit tests for ElasticParserUtil.createGeometry(...): accepts "lat,lon" strings (including
// negatives, fraction-only values, and embedded spaces), {lat,lon} property maps with double/
// int/string values, [lon,lat] arrays, geohash strings, GeoJSON-style shape maps, and WKT;
// unrecognized or malformed input yields null. The Random is seeded (123456789L) so the
// generated coordinates are deterministic across runs.
parserUtil.createGeometry(value); assertTrue(geom.equals(geometryFactory.createPoint(new Coordinate(lon, lat)))); } @Test public void testGeoPointPatternForWholeValues() { final Geometry geom = parserUtil.createGeometry("45,90"); assertTrue(geom.equals(geometryFactory.createPoint(new Coordinate(90, 45)))); } @Test public void testGeoPointPatternWithSpace() { final Geometry geom = parserUtil.createGeometry("45, 90"); assertTrue(geom.equals(geometryFactory.createPoint(new Coordinate(90, 45)))); } @Test public void testGeoPointAsDoubleProperties() { final double lat = rand.nextDouble() * 90 - 90; final double lon = rand.nextDouble() * 180 - 180; properties.put("lat", lat); properties.put("lon", lon); final Geometry geometry = parserUtil.createGeometry(properties); assertTrue(geometry.equals(geometryFactory.createPoint(new Coordinate(lon, lat)))); } @Test public void testGeoPointAsIntegerProperties() { final int lat = rand.nextInt(180) - 90; final int lon = rand.nextInt(360) - 180; properties.put("lat", lat); properties.put("lon", lon); final Geometry geometry = parserUtil.createGeometry(properties); assertTrue(geometry.equals(geometryFactory.createPoint(new Coordinate(lon, lat)))); } @Test public void testGeoPointAsStringProperties() { final double lat = rand.nextDouble() * 90 - 90; final double lon = rand.nextDouble() * 180 - 180; properties.put("lat", String.valueOf(lat)); properties.put("lon", String.valueOf(lon)); final Geometry geometry = parserUtil.createGeometry(properties); assertTrue(geometry.equals(geometryFactory.createPoint(new Coordinate(lon, lat)))); } @Test public void testGeoPointAsInvalidProperties() { properties.put("lat", true); properties.put("lon", true); final Geometry geometry = parserUtil.createGeometry(properties); assertNull(geometry); } @Test public void testGeoPointAsUnrecognizedProperties() { final double lat = rand.nextDouble() * 90 - 90; final double lon = rand.nextDouble() * 180 - 180; properties.put("latD", lat); properties.put("lonD",
lon); final Geometry geometry = parserUtil.createGeometry(properties); assertNull(geometry); } @Test public void testGeoPointAsStringArray() { final double lat = rand.nextDouble() * 90 - 90; final double lon = rand.nextDouble() * 180 - 180; final Geometry geometry = parserUtil.createGeometry(Arrays.asList(String.valueOf(lon), String.valueOf(lat))); assertTrue(geometry.equals(geometryFactory.createPoint(new Coordinate(lon, lat)))); } @Test public void testGeoPointAsInvalidArray() { final Geometry geometry = parserUtil.createGeometry(Arrays.asList(true, true)); assertNull(geometry); } @Test public void testGeoPointAsDoubleArray() { final double lat = rand.nextDouble() * 90 - 90; final double lon = rand.nextDouble() * 180 - 180; final Geometry geometry = parserUtil.createGeometry(Arrays.asList(lon, lat)); assertTrue(geometry.equals(geometryFactory.createPoint(new Coordinate(lon, lat)))); } @Test public void testGeoHash() { final double lat = rand.nextDouble() * 90 - 90; final double lon = rand.nextDouble() * 180 - 180; String geohash = GeoHash.encodeHash(lat, lon, 11); final Geometry expected = geometryFactory.createPoint(new Coordinate(lon, lat)); final Geometry actual = parserUtil.createGeometry(geohash); assertEquals(0, expected.distance(actual), 1e-5); } @Test public void testInvalidStringGeometry() { final double lat = rand.nextDouble() * 90 - 90; final double lon = rand.nextDouble() * 180 - 180; assertNull(parserUtil.createGeometry(String.valueOf(lat))); assertNull(parserUtil.createGeometry(lat + "," + lon + "," + 0)); assertNull(parserUtil.createGeometry("x:" + lat + "," + lon)); } @Test public void testGeoShapePoint() throws IOException { Point geom = rgb.createRandomPoint(); assertTrue(parserUtil.createGeometry(rgb.toMap(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapePointString() { Point geom = rgb.createRandomPoint(); final Map map = new HashMap<>(); final List coords = new ArrayList<>(); coords.add(String.valueOf(geom.getX()));
// GeoJSON shape maps round-trip through RandomGeometryBuilder.toMap(...)/toWkt(...) and are
// compared with equalsExact at 1e-9 tolerance. "circle" shapes additionally require a parsable
// radius (see convertToMeters below) and a center; missing center, missing radius, unparsable
// units, or a sub-millimeter radius all yield null.
coords.add(String.valueOf(geom.getY())); map.put("coordinates", coords); map.put("type", "Point"); assertTrue(parserUtil.createGeometry(map).equalsExact(geom, 1e-9)); } @Test public void testGeoShapePointWkt() { Point geom = rgb.createRandomPoint(); assertTrue(parserUtil.createGeometry(rgb.toWkt(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeLineString() throws IOException { LineString geom = rgb.createRandomLineString(); assertTrue(parserUtil.createGeometry(rgb.toMap(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeLineStringWkt() { LineString geom = rgb.createRandomLineString(); assertTrue(parserUtil.createGeometry(rgb.toWkt(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapePolygon() throws IOException { Polygon geom = rgb.createRandomPolygon(); assertTrue(parserUtil.createGeometry(rgb.toMap(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeCircle() { Map inputMap = new HashMap<>(); inputMap.put("type", "circle"); inputMap.put("radius", "5nmi"); List posList = new ArrayList<>(); posList.add("8.0"); posList.add("35.0"); inputMap.put("coordinates", posList); Geometry geometry = parserUtil.createGeometry(inputMap); assertNotNull(geometry); } @Test public void testGeoShapeCircleWithMissingCenter() { Map inputMap = new HashMap<>(); inputMap.put("type", "circle"); inputMap.put("radius", "5nmi"); Geometry geometry = parserUtil.createGeometry(inputMap); assertNull(geometry); } @Test public void testGeoShapeCircleWithInvalidRadii() { Map inputMap = new HashMap<>(); inputMap.put("type", "circle"); inputMap.put("radius", "5qx"); List posList = new ArrayList<>(); posList.add("8.0"); posList.add("35.0"); inputMap.put("coordinates", posList); Geometry geometry = parserUtil.createGeometry(inputMap); assertNull(geometry); } @Test public void testGeoShapeCircleWithSmallRadii() { Map inputMap = new HashMap<>(); inputMap.put("type", "circle"); inputMap.put("radius", "0.0000000000001m"); List posList = new
ArrayList<>(); posList.add("8.0"); posList.add("35.0"); inputMap.put("coordinates", posList); Geometry geometry = parserUtil.createGeometry(inputMap); assertNull(geometry); } @Test public void testGeoShapeCircleWithMissingRadii() { Map inputMap = new HashMap<>(); inputMap.put("type", "circle"); List posList = new ArrayList<>(); posList.add("8.0"); posList.add("35.0"); inputMap.put("coordinates", posList); Geometry geometry = parserUtil.createGeometry(inputMap); assertNull(geometry); } @Test public void testGeoShapePolygonWkt() { Polygon geom = rgb.createRandomPolygon(); assertTrue(parserUtil.createGeometry(rgb.toWkt(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeMultiPoint() throws IOException { MultiPoint geom = rgb.createRandomMultiPoint(); assertTrue(parserUtil.createGeometry(rgb.toMap(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeMultiPointWkt() { MultiPoint geom = rgb.createRandomMultiPoint(); assertTrue(parserUtil.createGeometry(rgb.toWkt(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeMultiLineString() throws IOException { MultiLineString geom = rgb.createRandomMultiLineString(); assertTrue(parserUtil.createGeometry(rgb.toMap(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeMultiLineStringWkt() { MultiLineString geom = rgb.createRandomMultiLineString(); assertTrue(parserUtil.createGeometry(rgb.toWkt(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeMultiPolygon() throws IOException { MultiPolygon geom = rgb.createRandomMultiPolygon(); assertTrue(parserUtil.createGeometry(rgb.toMap(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeMultiPolygonWkt() { MultiPolygon geom = rgb.createRandomMultiPolygon(); assertTrue(parserUtil.createGeometry(rgb.toWkt(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeGeometryCollection() throws IOException { rgb.setNumGeometries(5); GeometryCollection geom = rgb.createRandomGeometryCollection();
// readField(...) tests: resolves dot-separated paths through nested maps and lists of maps,
// flattens multi-valued results into a single List, and — per the two dot-named tests at the
// end — prefers a literal key containing dots ("coalesceentity.name") over path traversal.
assertTrue(parserUtil.createGeometry(rgb.toMap(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeGeometryCollectionWkt() { rgb.setNumGeometries(5); GeometryCollection geom = rgb.createRandomGeometryCollection(); assertTrue(parserUtil.createGeometry(rgb.toWkt(geom)).equalsExact(geom, 1e-9)); } @Test public void testGeoShapeEnvelope() { Envelope envelope = rgb.createRandomEnvelope(); Geometry expected = geometryFactory.toGeometry(envelope); assertTrue(parserUtil.createGeometry(rgb.toMap(envelope)).equalsExact(expected, 1e-9)); } @Test public void testUnrecognizedGeometry() { final Geometry geom = parserUtil.createGeometry(3.0); assertNull(geom); } @Test public void testReadStringField() { properties.put("attr", "value"); List values = parserUtil.readField(properties, "attr"); assertEquals(1, values.size()); assertEquals("value", values.get(0)); } @Test public void testReadNumericField() { properties.put("attr", 2.3); List values = parserUtil.readField(properties, "attr"); assertEquals(1, values.size()); assertEquals(2.3, values.get(0)); } @Test public void testReadStringFieldWithConfuser() { properties.put("parent1", new LinkedHashMap()); ((Map) properties.get("parent1")).put("attr", "value2"); properties.put("attr", "value"); properties.put("parent2", new LinkedHashMap()); ((Map) properties.get("parent2")).put("attr", "value3"); List values = parserUtil.readField(properties, "attr"); assertEquals(1, values.size()); assertEquals("value", values.get(0)); } @Test public void testReadInnerString() { properties.put("parent", new LinkedHashMap()); ((Map) properties.get("parent")).put("attr", "value"); List values = parserUtil.readField(properties, "parent.attr"); assertEquals(1, values.size()); assertEquals("value", values.get(0)); } @Test public void testReadInnerStringArray() { properties.put("parent", new LinkedHashMap()); ((Map) properties.get("parent")).put("attr", Arrays.asList("value1", "value2")); List values = parserUtil.readField(properties,
"parent.attr"); assertEquals(2, values.size()); assertEquals("value1", values.get(0)); assertEquals("value2", values.get(1)); } @Test public void testReadStringFromObjectArray() { properties.put("parent", new ArrayList>()); ((List) properties.get("parent")).add(new LinkedHashMap()); ((Map) ((List) properties.get("parent")).get(0)).put("attr", "value1"); ((List) properties.get("parent")).add(new LinkedHashMap()); ((Map) ((List) properties.get("parent")).get(1)).put("attr", "value2"); List values = parserUtil.readField(properties, "parent.attr"); assertEquals(2, values.size()); assertEquals("value1", values.get(0)); assertEquals("value2", values.get(1)); } @Test public void testReadStringFromObjectArrayOnceRemoved() { properties.put("parent", new ArrayList>()); ((List) properties.get("parent")).add(new LinkedHashMap()); ((Map) ((List) properties.get("parent")).get(0)).put("child", new LinkedHashMap()); ((Map) ((Map) ((List) properties.get("parent")).get(0)).get("child")).put("attr", "value1"); ((List) properties.get("parent")).add(new LinkedHashMap()); ((Map) ((List) properties.get("parent")).get(1)).put("child", new LinkedHashMap()); ((Map) ((Map) ((List) properties.get("parent")).get(1)).get("child")).put("attr", "value2"); List values = parserUtil.readField(properties, "parent.child.attr"); assertEquals(2, values.size()); assertEquals("value1", values.get(0)); assertEquals("value2", values.get(1)); } @Test public void testReadMapField() { final Map map = new LinkedHashMap<>(); properties.put("attr", map); map.put("attr2", "value2"); map.put("attr3", "value3"); List values = parserUtil.readField(properties, "attr"); assertEquals(1, values.size()); assertEquals(values.get(0), map); } @Test public void testConvertToMeters() { double distance = ElasticParserUtil.convertToMeters("1.2mm"); assertEquals(0.0012, distance, 0.0000000001); distance = ElasticParserUtil.convertToMeters("1.2"); assertEquals(1.2, distance, 0.0000000001); distance =
ElasticParserUtil.convertToMeters("12"); assertEquals(12.0, distance, 0.0000000001); distance = ElasticParserUtil.convertToMeters("0.12cm"); assertEquals(0.0012, distance, 0.0000000001); try { ElasticParserUtil.convertToMeters("999xyz"); fail("Shouldn't get here"); } catch(IllegalArgumentException ignored) { } try { ElasticParserUtil.convertToMeters("mm1.2"); fail("Shouldn't get here"); } catch(IllegalArgumentException ignored) { } try { ElasticParserUtil.convertToMeters(".2"); fail("Shouldn't get here"); } catch(IllegalArgumentException ignored) { } try { ElasticParserUtil.convertToMeters(".2m"); fail("Shouldn't get here"); } catch(IllegalArgumentException ignored) { } } /** * This test ensures that dots within attribute names are handled. */ @Test public void testAttributesContainingDots() { properties.put("coalesceentity.name", "value"); List values = parserUtil.readField(properties, "coalesceentity.name"); Assert.assertEquals(1, values.size()); Assert.assertEquals(properties.get("coalesceentity.name"), values.get(0)); } /** * This test ensures that GeoJSON formatted values are returned correctly when dots are used within the attribute name. */ @Test public void testGeoJSONAttributesContainingDots() { Map geojson = new HashMap<>(); geojson.put("type", "circle"); geojson.put("coordinates", "[1,1]"); geojson.put("radius", "1m"); properties.put("coalesceentity.geo", geojson); List values = parserUtil.readField(properties, "coalesceentity.geo"); Assert.assertEquals(1, values.size()); Assert.assertEquals(properties.get("coalesceentity.geo"), values.get(0)); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticResponseTest.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file.
*/ package mil.nga.giat.data.elasticsearch; import static org.junit.Assert.*; import java.io.IOException; import org.junit.Before; import org.junit.Test; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; public class ElasticResponseTest { private ObjectMapper mapper; @Before public void setup() { mapper = new ObjectMapper(); } @Test public void testDefaults() { ElasticResponse response = new ElasticResponse(); assertEquals(0, response.getTotalNumHits()); assertEquals(0, response.getNumHits()); assertTrue(response.getHits().isEmpty()); } @Test public void testTotalHits() throws IOException { ElasticResponse response = mapper.readValue("{\"hits\":{\"total\":10}}", ElasticResponse.class); assertEquals(10, response.getTotalNumHits()); } @Test public void testNullMaxScore() throws IOException { ElasticResponse response = mapper.readValue("{\"hits\":{}}", ElasticResponse.class); assertEquals(0, response.getMaxScore(), 1e-9); } @Test public void testMaxScore() throws IOException { ElasticResponse response = mapper.readValue("{\"hits\":{\"max_score\":0.8}}", ElasticResponse.class); assertEquals(0.8, response.getMaxScore(), 1e-6); } @Test public void testScroll() throws IOException { ElasticResponse response = mapper.readValue("{\"_scroll_id\":\"12345\"}", ElasticResponse.class); assertEquals("12345", response.getScrollId()); } @Test public void getNumHits() throws IOException { ElasticResponse response = mapper.readValue("{\"hits\":{\"hits\":[{},{},{}]}}", ElasticResponse.class); assertEquals(3, response.getNumHits()); } @Test public void testHitId() throws IOException { ElasticResponse response = mapper.readValue("{\"hits\":{\"hits\":[{\"_id\": \"5\"}]}}", ElasticResponse.class); assertEquals(1, response.getResults().getHits().size()); assertEquals("5", response.getResults().getHits().get(0).getId()); } @Test public void testHitIndex() throws IOException { ElasticResponse 
response = mapper.readValue("{\"hits\":{\"hits\":[{\"_index\": \"test\"}]}}", ElasticResponse.class); assertEquals(1, response.getResults().getHits().size()); assertEquals("test", response.getResults().getHits().get(0).getIndex()); } @Test public void testHitType() throws IOException { ElasticResponse response = mapper.readValue("{\"hits\":{\"hits\":[{\"_type\": \"test\"}]}}", ElasticResponse.class); assertEquals(1, response.getResults().getHits().size()); assertEquals("test", response.getResults().getHits().get(0).getType()); } @Test public void testHitScore() throws IOException { ElasticResponse response = mapper.readValue("{\"hits\":{\"hits\":[{\"_score\": 0.4}]}}", ElasticResponse.class); assertEquals(1, response.getResults().getHits().size()); assertEquals(0.4, response.getResults().getHits().get(0).getScore(), 1e-6); } @Test public void testHitFields() throws IOException { String content = "{\"hits\":{\"hits\":[{\"fields\": {\"tags\":[\"red\"]}}]}}"; ElasticResponse response = mapper.readValue(content, ElasticResponse.class); assertEquals(1, response.getResults().getHits().size()); assertNotNull(response.getResults().getHits().get(0).field("tags")); assertEquals(ImmutableList.of("red"), response.getResults().getHits().get(0).field("tags")); response = mapper.readValue("{\"hits\":{\"hits\":[{}]}}", ElasticResponse.class); assertNull(response.getResults().getHits().get(0).field("tags")); } @Test public void testHitSource() throws IOException { String content = "{\"hits\":{\"hits\":[{\"_source\": {\"tags\":[\"red\"]}}]}}"; ElasticResponse response = mapper.readValue(content, ElasticResponse.class); assertEquals(1, response.getResults().getHits().size()); assertNotNull(response.getResults().getHits().get(0).getSource()); assertEquals(ImmutableList.of("red"), response.getResults().getHits().get(0).getSource().get("tags")); } @Test public void testAggregations() throws IOException { String content = "{\"aggregations\":{\"first\":{\"buckets\": 
[{\"key\":\"0\",\"doc_count\":10}]}}}"; ElasticResponse response = mapper.readValue(content, ElasticResponse.class); assertEquals(1, response.getAggregations().size()); assertEquals(1, response.getAggregations().size()); ElasticAggregation aggregations = response.getAggregations().values().stream().findFirst().orElse(null); assertNotNull(aggregations); assertEquals(1, aggregations.getBuckets().size()); assertEquals(ImmutableMap.of("key","0","doc_count",10), aggregations.getBuckets().get(0)); } @Test public void testMissingAggregation() throws IOException { ElasticResponse response = mapper.readValue("{}", ElasticResponse.class); assertNull(response.getAggregations()); } @Test public void testToString() throws IOException { String content = "{\"hits\":{\"hits\":[{\"_source\": {\"tags\":[\"red\"]}}]}, " + "\"aggregations\":{\"first\":{\"buckets\": [{\"key\":\"0\",\"doc_count\":10}]}}}"; ElasticResponse response = mapper.readValue(content, ElasticResponse.class); String responseStr = response.toString(); assertTrue(responseStr.contains("hits=1")); assertTrue(responseStr.contains("numBuckets=1")); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticTemporalFilterIT.java ================================================ /* * GeoTools - The Open Source Java GIS Toolkit * http://geotools.org * * (C) 2014, Open Source Geospatial Foundation (OSGeo) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
*/ package mil.nga.giat.data.elasticsearch; import java.util.Date; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureIterator; import org.junit.Test; import static org.junit.Assert.*; import org.opengis.feature.simple.SimpleFeature; import org.opengis.filter.Filter; import org.opengis.filter.FilterFactory; import org.opengis.temporal.Period; public class ElasticTemporalFilterIT extends ElasticTestSupport { @Test public void testLessDateFilterLong() throws Exception { init(); Date testDate = new Date(1005912798000L); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.lessOrEqual(ff.property("installed_td"), ff.literal(testDate.getTime())); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(4, features.size()); SimpleFeatureIterator it = features.features(); while (it.hasNext()) { SimpleFeature next = it.next(); Date date = (Date) next.getAttribute("installed_td"); assertTrue(date.before(testDate) || date.equals(testDate)); } it.close(); } @Test public void testGreaterDateFilterLong() throws Exception { init(); Date testDate = new Date(1005912798000L); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.greaterOrEqual(ff.property("installed_td"), ff.literal(testDate.getTime())); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(7, features.size()); SimpleFeatureIterator it = features.features(); while (it.hasNext()) { SimpleFeature next = it.next(); Date date = (Date) next.getAttribute("installed_td"); assertTrue(date.after(testDate) || date.equals(testDate)); } it.close(); } @Test public void testCompareDateFilter() throws Exception { init(); Date testDate = DATE_FORMAT.parse("2009-06-28 00:00:00"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.lessOrEqual(ff.property("installed_tdt"), ff.literal(testDate)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(4, features.size()); 
SimpleFeatureIterator it = features.features(); while (it.hasNext()) { Date date = (Date) it.next().getAttribute("installed_tdt"); assertTrue(date.before(testDate) || date.equals(testDate)); } it.close(); f = ff.greaterOrEqual(ff.property("installed_tdt"), ff.literal(testDate)); features = featureSource.getFeatures(f); assertEquals(5, features.size()); it = features.features(); while (it.hasNext()) { Date date = (Date) it.next().getAttribute("installed_tdt"); assertTrue(date.after(testDate) || date.equals(testDate)); } it.close(); } @Test public void testAfterFilter() throws Exception { init(); Date testDate = DATE_FORMAT.parse("2009-28-06 00:00:00"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.after(ff.property("installed_tdt"), ff.literal(testDate)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(5, features.size()); } @Test public void testAfterInterval() throws Exception { init(); Period period = period("2011-21-05 00:00:00", "2011-15-09 00:00:00"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.after(ff.property("installed_tdt"), ff.literal(period)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(4, features.size()); } @Test public void testBeforeFilter() throws Exception { init(); Date testDate = DATE_FORMAT.parse("2009-28-06 00:00:00"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.before(ff.property("installed_tdt"), ff.literal(testDate)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(4, features.size()); } @Test public void testBeforeInterval() throws Exception { init(); Period period = period("2000-12-11 00:00:00", "2011-05-21 00:00:00"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.before(ff.property("installed_tdt"), ff.literal(period)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); } @Test public void testBegins() throws Exception { 
init(); Period period = period("2004-20-06 03:44:56", "2014-22-06 03:44:56"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.begins(ff.property("installed_tdt"), ff.literal(period)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); } @Test public void testBegunBy() throws Exception { init(); Period period = period("2004-20-06 03:44:56", "2014-22-06 03:44:56"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.begunBy(ff.literal(period), ff.property("installed_tdt")); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); } @Test public void testEnds() throws Exception { init(); Period period = period("2002-20-06 03:44:56", "2004-20-06 03:44:56"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.ends(ff.property("installed_tdt"), ff.literal(period)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); } @Test public void testEndedBy() throws Exception { init(); Period period = period("2004-11-06 03:44:56", "2004-20-06 03:44:56"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.endedBy(ff.literal(period), ff.property("installed_tdt")); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); } @Test public void testDuring() throws Exception { init(); Period period = period("2004-19-06 03:44:56", "2004-20-06 03:44:58"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.during(ff.property("installed_tdt"), ff.literal(period)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); } @Test public void testTContains() throws Exception { init(); Period period = period("2004-19-06 03:44:56", "2004-20-06 03:44:58"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.tcontains(ff.literal(period), ff.property("installed_tdt")); SimpleFeatureCollection features = 
featureSource.getFeatures(f); assertEquals(1, features.size()); } @Test public void testTEquals() throws Exception { init(); Date testDate = DATE_FORMAT.parse("2013-01-10 00:13:11"); FilterFactory ff = dataStore.getFilterFactory(); Filter f = ff.tequals(ff.property("installed_tdt"), ff.literal(testDate)); SimpleFeatureCollection features = featureSource.getFeatures(f); assertEquals(1, features.size()); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticTestSupport.java ================================================ /* * GeoTools - The Open Source Java GIS Toolkit * http://geotools.org * * (C) 2014, Open Source Geospatial Foundation (OSGeo) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
*/ package mil.nga.giat.data.elasticsearch; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Scanner; import java.util.TimeZone; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.http.HttpHost; import org.elasticsearch.client.RestClient; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.feature.NameImpl; import org.geotools.temporal.object.DefaultInstant; import org.geotools.temporal.object.DefaultPeriod; import org.geotools.temporal.object.DefaultPosition; import org.junit.After; import org.junit.Before; import org.opengis.feature.simple.SimpleFeature; import org.opengis.temporal.Instant; import org.opengis.temporal.Period; class ElasticTestSupport { private static final String TEST_FILE = "wifiAccessPoint.json"; private static final String LEGACY_ACTIVE_MAPPINGS_FILE = "active_mappings_legacy.json"; private static final String NG_ACTIVE_MAPPINGS_FILE = "active_mappings_ng.json"; private static final String ACTIVE_MAPPINGS_FILE = "active_mappings.json"; private static final int numShards = 1; private static final int numReplicas = 0; static final String TYPE_NAME = "active"; private static final boolean SCROLL_ENABLED = false; private static final long SCROLL_SIZE = 20; private static final ObjectMapper mapper = new ObjectMapper(); private static final ObjectReader mapReader = mapper.readerWithView(Map.class).forType(HashMap.class); static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-dd-MM HH:mm:ss"); static final int PORT = 9200; final int SOURCE_SRID = 4326; String indexName; 
ElasticFeatureSource featureSource; static ElasticDataStore dataStore; ElasticLayerConfiguration config; ElasticClient client; @Before public void beforeTest() throws Exception { indexName = "gt_integ_test_" + System.nanoTime(); client = new RestElasticClient(RestClient.builder(new HttpHost("localhost", PORT, "http")).build()); Map params = createConnectionParams(); ElasticDataStoreFactory factory = new ElasticDataStoreFactory(); dataStore = (ElasticDataStore) factory.createDataStore(params); createIndices(); } @After public void afterTest() throws Exception { performRequest("DELETE", "/" + indexName, null); dataStore.dispose(); client.close(); } private void createIndices() throws IOException { // create index and add mappings Map settings = new HashMap<>(); settings.put("settings", ImmutableMap.of("number_of_shards", numShards, "number_of_replicas", numReplicas)); final String filename; if (client.getVersion() < 5) { filename = LEGACY_ACTIVE_MAPPINGS_FILE; } else if (client.getVersion() > 6.1) { filename = NG_ACTIVE_MAPPINGS_FILE; } else { filename = ACTIVE_MAPPINGS_FILE; } final InputStream resource = ClassLoader.getSystemResourceAsStream(filename); if (resource != null) { try (Scanner s = new Scanner(resource)) { s.useDelimiter("\\A"); Map source = mapReader.readValue(s.next()); if (client.getVersion() < 7) { Map mappings = new HashMap<>(); mappings.put(TYPE_NAME, source); settings.put("mappings", mappings); } else { settings.put("mappings", source); } } } performRequest("PUT", "/" + indexName, settings); // add alias Map aliases = ImmutableMap.of("actions", ImmutableList.of(ImmutableMap.of("index", indexName, "alias", indexName + "_alias"))); performRequest("PUT", "/_alias", aliases); } private void indexDocuments(String status) throws IOException { final InputStream inputStream = ClassLoader.getSystemResourceAsStream(TEST_FILE); if (inputStream != null) { try (Scanner scanner = new Scanner(inputStream)) { scanner.useDelimiter(System.lineSeparator()); final 
StringBuilder builder = new StringBuilder(); while (scanner.hasNext()) { final String line = scanner.next(); if (!line.startsWith("#")) { builder.append(line); } } final Map content = mapReader.readValue(builder.toString()); @SuppressWarnings("unchecked") final List> features = (List>) content.get("features"); for (final Map featureSource : features) { if (featureSource.containsKey("status_s") && featureSource.get("status_s").equals(status)) { final String id = featureSource.containsKey("id") ? (String) featureSource.get("id") : null; final String typeName = client.getVersion() < 7 ? TYPE_NAME : "_doc"; performRequest("POST", "/" + indexName + "/" + typeName + "/" + id, featureSource); } } performRequest("POST", "/" + indexName + "/_refresh", null); } finally { inputStream.close(); } } } Map createConnectionParams() { Map params = new HashMap<>(); params.put(ElasticDataStoreFactory.HOSTNAME.key, "localhost"); params.put(ElasticDataStoreFactory.HOSTPORT.key, PORT); params.put(ElasticDataStoreFactory.INDEX_NAME.key, indexName); params.put(ElasticDataStoreFactory.SCROLL_ENABLED.key, SCROLL_ENABLED); params.put(ElasticDataStoreFactory.SCROLL_SIZE.key, SCROLL_SIZE); return params; } void init() throws Exception { DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC")); init("active"); } void init(String layerName) throws Exception { init(layerName, "geo"); } void init(String status, String geometryField) throws Exception { indexDocuments(status); List attributes = dataStore.getElasticAttributes(new NameImpl(TYPE_NAME)); config = new ElasticLayerConfiguration(TYPE_NAME); List layerAttributes = new ArrayList<>(); for (ElasticAttribute attribute : attributes) { attribute.setUse(true); if (geometryField.equals(attribute.getName())) { ElasticAttribute copy = new ElasticAttribute(attribute); copy.setDefaultGeometry(true); layerAttributes.add(copy); } else { layerAttributes.add(attribute); } } config.getAttributes().clear(); config.getAttributes().addAll(layerAttributes); 
dataStore.setLayerConfiguration(config); featureSource = (ElasticFeatureSource) dataStore.getFeatureSource(TYPE_NAME); } private void performRequest(String method, String endpoint, Map body) throws IOException { ((RestElasticClient) client).performRequest(method, endpoint, body); } private Date date(String date) throws ParseException { return DATE_FORMAT.parse(date); } private Instant instant(String d) throws ParseException { return new DefaultInstant(new DefaultPosition(date(d))); } Period period(String d1, String d2) throws ParseException { return new DefaultPeriod(instant(d1), instant(d2)); } List readFeatures(SimpleFeatureIterator iterator) { final List features = new ArrayList<>(); try { while (iterator.hasNext()) { features.add(iterator.next()); } } finally { iterator.close(); } return features; } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/ElasticViewParametersFilterIT.java ================================================ /* * GeoTools - The Open Source Java GIS Toolkit * http://geotools.org * * (C) 2014, Open Source Geospatial Foundation (OSGeo) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
*/ package mil.nga.giat.data.elasticsearch; import java.net.URLEncoder; import java.util.HashMap; import java.util.Map; import org.geotools.data.Query; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.data.store.ContentFeatureCollection; import org.geotools.util.factory.Hints; import org.junit.Test; import static org.junit.Assert.*; import org.opengis.filter.FilterFactory; import org.opengis.filter.PropertyIsEqualTo; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; public class ElasticViewParametersFilterIT extends ElasticTestSupport { private final ObjectMapper mapper = new ObjectMapper(); @Test public void testNativeTermQuery() throws Exception { init("not-active"); Map vparams = new HashMap<>(); Map query = ImmutableMap.of("term", ImmutableMap.of("security_ss", "WPA")); vparams.put("q", mapper.writeValueAsString(query)); Hints hints = new Hints(Hints.VIRTUAL_TABLE_PARAMETERS, vparams); Query q = new Query(featureSource.getSchema().getTypeName()); q.setHints(hints); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo filter = ff.equals(ff.property("speed_is"), ff.literal("300")); q.setFilter(filter); ContentFeatureCollection features = featureSource.getFeatures(q); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.12"); } @Test public void testEncodedNativeTermQuery() throws Exception { init("not-active"); Map vparams = new HashMap<>(); Map query = ImmutableMap.of("term", ImmutableMap.of("security_ss", "WPA")); vparams.put("q", URLEncoder.encode(mapper.writeValueAsString(query), "UTF-8")); Hints hints = new Hints(Hints.VIRTUAL_TABLE_PARAMETERS, vparams); Query q = new Query(featureSource.getSchema().getTypeName()); q.setHints(hints); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo filter = ff.equals(ff.property("speed_is"), ff.literal("300")); 
q.setFilter(filter); ContentFeatureCollection features = featureSource.getFeatures(q); assertEquals(1, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getID(), "active.12"); } @Test public void testNativeBooleanQuery() throws Exception { init(); Map vparams = new HashMap<>(); Map query = ImmutableMap.of("bool", ImmutableMap.of("must", ImmutableMap.of("term", ImmutableMap.of("security_ss", "WPA")), "must_not", ImmutableMap.of("term", ImmutableMap.of("modem_b", true)))); vparams.put("q", mapper.writeValueAsString(query)); Hints hints = new Hints(Hints.VIRTUAL_TABLE_PARAMETERS, vparams); Query q = new Query(featureSource.getSchema().getTypeName()); q.setHints(hints); FilterFactory ff = dataStore.getFilterFactory(); PropertyIsEqualTo filter = ff.equals(ff.property("speed_is"), ff.literal("300")); q.setFilter(filter); ContentFeatureCollection features = featureSource.getFeatures(q); assertEquals(2, features.size()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getAttribute("modem_b"), false); assertTrue(fsi.hasNext()); assertEquals(fsi.next().getAttribute("modem_b"), false); } @Test public void testNativeAggregation() throws Exception { init(); Map vparams = new HashMap<>(); Map query = ImmutableMap.of("agg", ImmutableMap.of("geohash_grid", ImmutableMap.of("field", "geo", "precision", 3))); vparams.put("a", mapper.writeValueAsString(query)); Hints hints = new Hints(Hints.VIRTUAL_TABLE_PARAMETERS, vparams); Query q = new Query(featureSource.getSchema().getTypeName()); q.setHints(hints); ContentFeatureCollection features = featureSource.getFeatures(q); assertFalse(features.isEmpty()); SimpleFeatureIterator fsi = features.features(); assertTrue(fsi.hasNext()); assertNotNull(fsi.next().getAttribute("_aggregation")); } } ================================================ FILE: 
gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/GeohashUtilTest.java ================================================ package mil.nga.giat.data.elasticsearch; import org.geotools.geometry.jts.ReferencedEnvelope; import static org.geotools.geometry.jts.ReferencedEnvelope.EVERYTHING; import org.junit.Test; import com.google.common.collect.ImmutableMap; import static org.junit.Assert.*; import java.util.HashMap; import java.util.Map; public class GeohashUtilTest { @Test public void testComputePrecision() { assertEquals(1, GeohashUtil.computePrecision(new ReferencedEnvelope(-180,180,-90,90,null), 32, 0.9)); assertEquals(2, GeohashUtil.computePrecision(new ReferencedEnvelope(-180,180,-90,90,null), 1024, 0.9)); assertEquals(3, GeohashUtil.computePrecision(new ReferencedEnvelope(-180,180,-90,90,null), 32768, 0.9)); assertEquals(2, GeohashUtil.computePrecision(new ReferencedEnvelope(-180,180,-90,90,null), 1000, 0.9)); assertEquals(3, GeohashUtil.computePrecision(new ReferencedEnvelope(-180,180,-90,90,null), 1500, 0.9)); assertEquals(1, GeohashUtil.computePrecision(new ReferencedEnvelope(EVERYTHING.getMinX(),EVERYTHING.getMaxX(),EVERYTHING.getMinY(),EVERYTHING.getMaxY(),null), 32, 0.9)); assertEquals(1, GeohashUtil.computePrecision(new ReferencedEnvelope(-1,1,-1,1,null), 0, 0.9)); assertEquals(6, GeohashUtil.computePrecision(new ReferencedEnvelope(-180,180,-90,90,null), (long) 1e9, 0.9)); assertEquals(6, GeohashUtil.computePrecision(new ReferencedEnvelope(-180,180,-90,90,null), 1, 1e9)); assertEquals(1, GeohashUtil.computePrecision(new ReferencedEnvelope(-180,180,-90,90,null), 1, -1e9)); } @Test public void doNotUpdatePrecisionIfAlreadyDefined() { final Map geohashGridAgg = new HashMap<>(ImmutableMap.of("field", "name", "precision", 3)); final Map>> aggregations; aggregations = ImmutableMap.of("first",ImmutableMap.of("geohash_grid",geohashGridAgg)); final Map expected = ImmutableMap.of("first", 
ImmutableMap.of("geohash_grid",ImmutableMap.of("field","name","precision",3))); GeohashUtil.updateGridAggregationPrecision(aggregations, 2); assertEquals(expected, aggregations); } @Test public void updatePrecisionIfNotDefined() { final Map geohashGridAgg = new HashMap<>(ImmutableMap.of("field", "name")); final Map>> aggregations; aggregations = ImmutableMap.of("first",ImmutableMap.of("geohash_grid",geohashGridAgg)); final Map expected = ImmutableMap.of("first", ImmutableMap.of("geohash_grid",ImmutableMap.of("field","name","precision",2))); GeohashUtil.updateGridAggregationPrecision(aggregations, 2); assertEquals(expected, aggregations); } } ================================================ FILE: gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/RandomGeometryBuilder.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import org.geotools.geojson.geom.GeometryJSON; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryCollection; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.MultiLineString; import org.locationtech.jts.geom.MultiPoint; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import org.locationtech.jts.io.WKTWriter; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; class RandomGeometryBuilder { private final GeometryFactory geometryFactory; private final Random random; 
private final GeometryJSON geometryJson; private final int decimals; private final int numPoints; private int numGeometries; private final WKTWriter wktWriter; public RandomGeometryBuilder() { geometryFactory = new GeometryFactory(); random = new Random(123456789L); decimals = 4; numPoints = 10; numGeometries = 2; geometryJson = new GeometryJSON(decimals); wktWriter = new WKTWriter(); } public Point createRandomPoint() { return geometryFactory.createPoint(createRandomCoord()); } public LineString createRandomLineString() { Coordinate[] coords = new Coordinate[numPoints]; for (int i=0; i 179 || coord1.y > 89) { coord1 = createRandomCoord(); } final Coordinate coord2 = createRandomCoord((int) (coord1.x+0.5), (int) (coord1.y+0.5)); return new Envelope(coord1, coord2); } private Coordinate createRandomCoord() { return createRandomCoord(-180, -90); } private Coordinate createRandomCoord(int minx, int miny) { int dx = 180 -minx; int dy = 90 -miny; final int factor = (int) Math.pow(10, decimals); final double lon = (random.nextInt(dx*factor)+minx*factor)/((double) factor); final double lat = (random.nextInt(dy*factor)+miny*factor)/((double) factor); return new Coordinate(lon, lat); } public Map toMap(Geometry geometry) throws IOException { final String json = geometryJson.toString(geometry); return new ObjectMapper().readValue(json, new TypeReference>() {}); } public String toWkt(Geometry geometry) { return wktWriter.write(geometry); } public Map toMap(Envelope envelope) { final Map properties = new HashMap<>(); final List> coordinates = new ArrayList<>(); coordinates.add(Arrays.asList(envelope.getMinX(), envelope.getMaxY())); coordinates.add(Arrays.asList(envelope.getMaxX(), envelope.getMinY())); properties.put("type", "envelope"); properties.put("coordinates", coordinates); return properties; } public void setNumGeometries(int numGeometries) { this.numGeometries = numGeometries; } } ================================================ FILE: 
gt-elasticsearch/src/test/java/mil/nga/giat/data/elasticsearch/RestElasticClientTest.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.data.elasticsearch; import static org.junit.Assert.*; import org.apache.http.Header; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentMatcher; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import static org.mockito.Mockito.*; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.http.HttpEntity; import org.apache.http.StatusLine; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.message.BasicHeader; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContext; import org.springframework.security.core.context.SecurityContextHolder; public class RestElasticClientTest { private RestClient mockRestClient; private RestClient mockProxyRestClient; private Response mockResponse; private HttpEntity mockEntity; private StatusLine mockStatusLine; private RestElasticClient client; private RestElasticClient proxyClient; private Authentication mockAuth; private SecurityContext mockStx; @Before public void setup() throws UnsupportedOperationException, IOException { mockRestClient = mock(RestClient.class); mockProxyRestClient = mock(RestClient.class); mockResponse = mock(Response.class); 
mockEntity = mock(HttpEntity.class); mockStatusLine = mock(StatusLine.class); mockAuth = mock(Authentication.class); mockStx = mock(SecurityContext.class); when(mockResponse.getEntity()).thenReturn(mockEntity); when(mockResponse.getStatusLine()).thenReturn(mockStatusLine); when(mockStatusLine.getStatusCode()).thenReturn(200); final Response mockDefaultResponse = mock(Response.class); final StatusLine mockDefaultStatusLine = mock(StatusLine.class); when(mockDefaultResponse.getStatusLine()).thenReturn(mockDefaultStatusLine); when(mockDefaultStatusLine.getStatusCode()).thenReturn(500); when(mockRestClient.performRequest(any())).thenReturn(mockDefaultResponse); when(mockStx.getAuthentication()).thenReturn(mockAuth); when(mockAuth.isAuthenticated()).thenReturn(true); when(mockAuth.getName()).thenReturn("runAsTest"); SecurityContextHolder.setContext(mockStx); client = new RestElasticClient(mockRestClient); proxyClient = new RestElasticClient(mockRestClient, mockProxyRestClient, true); InputStream inputStream = new ByteArrayInputStream("{}".getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); } @Test public void testVersion() throws IOException { mockVersion("6.7.8"); final double version = client.getVersion(); assertEquals(6.7, version, 1e-9); } @Test public void testVersionWithInvalidFormat() throws IOException { mockVersion("6"); final double version = client.getVersion(); assertEquals(RestElasticClient.DEFAULT_VERSION, version, 1e-9); } @Test public void testVersionWithError() throws IOException { String content = "{}"; InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); final RequestMatcher matcher = new RequestMatcher("GET", "/", null, null); when(mockRestClient.performRequest(argThat(matcher))).thenThrow(IOException.class); final double version = client.getVersion(); assertEquals(RestElasticClient.DEFAULT_VERSION, version, 1e-9); } @Test public void testGetTypes() throws 
IOException { String content = "{\"status_s\": {\"mappings\": " + "{\"properties\": {\"status_s\": {\"type\": \"keyword\"}}}}}"; InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); when(mockRestClient.performRequest(new Request("GET", "/status_s/_mapping"))).thenReturn(mockResponse); List names = client.getTypes("status_s"); assertEquals(1, names.size()); } @Test public void testLegacyGetTypes() throws IOException { String content = "{\"status_s\": {\"mappings\": " + "{\"active\": {\"properties\": {\"status_s\": {\"type\": \"keyword\"}}}}}}"; InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); mockVersion("6.0.0"); when(mockRestClient.performRequest(new Request("GET", "/status_s/_mapping"))).thenReturn(mockResponse); List names = client.getTypes("status_s"); assertEquals(1, names.size()); assertEquals("active", names.get(0)); } @Test public void testGetMapping() throws IOException { String content = "{\"status_s\": {\"mappings\":" + "{\"properties\": {\"status_s\": {\"type\": \"keyword\"}}}}}"; InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); when(mockRestClient.performRequest(new Request("GET", "/status_s/_mapping"))).thenReturn(mockResponse); Map> expected = ImmutableMap.of("status_s", ImmutableMap.of("type","keyword")); assertEquals(expected, client.getMapping("status_s", "active")); } @Test public void testLegacyGetMapping() throws IOException { String content = "{\"status_s\": {\"mappings\": {\"active\": " + "{\"properties\": {\"status_s\": {\"type\": \"keyword\"}}}}}}"; InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); mockVersion("6.0.0"); when(mockRestClient.performRequest(new Request("GET", "/status_s/_mapping/active"))).thenReturn(mockResponse); Map> 
expected = ImmutableMap.of("status_s", ImmutableMap.of("type","keyword")); assertEquals(expected, client.getMapping("status_s", "active")); } @Test public void testGetMappingWithMissingIndexAndNoAlias() throws IOException { String content = "{\"status_s3\": {\"mappings\": " + "{\"properties\": {\"status_s\": {\"type\": \"keyword\"}}}}}}"; InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); when(mockRestClient.performRequest(new Request("GET", "/status_s/_mapping"))).thenReturn(mockResponse); when(mockRestClient.performRequest(new Request("GET", "/_alias/status_s"))).thenThrow(IOException.class); Map> expected = ImmutableMap.of("status_s", ImmutableMap.of("type","keyword")); assertEquals(expected, client.getMapping("status_s", "active")); } @Test public void testGetMappingMissing() throws IOException { String content = "{}"; InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); when(mockRestClient.performRequest(new Request("GET", "/status_s/_mapping"))).thenReturn(mockResponse); when(mockRestClient.performRequest(new Request("GET", "/_alias/status_s"))).thenThrow(IOException.class); assertNull(client.getMapping("status_s", "active")); } @Test public void testGetMappingWithExtra() throws IOException { String content = "{\"status_s\": {\"mappings\":" + "{\"properties\": {\"status_s\": {\"type\": \"keyword\"}}}}}"; InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); when(mockRestClient.performRequest(new Request("GET", "/status_s/_mapping"))).thenReturn(mockResponse); Map> expected = ImmutableMap.of("status_s", ImmutableMap.of("type","keyword")); assertEquals(expected, client.getMapping("status_s", "active")); } @Test public void testGetMappingNotFound() throws IOException { ResponseException mockException = mock(ResponseException.class); 
when(mockException.getResponse()).thenReturn(mockResponse); when(mockStatusLine.getStatusCode()).thenReturn(404); when(mockRestClient.performRequest(any(Request.class))).thenThrow(mockException); assertNull(client.getMapping("status_s", "active")); } @Test(expected=ResponseException.class) public void testGetMappingWithError() throws IOException { ResponseException mockException = mock(ResponseException.class); when(mockException.getResponse()).thenReturn(mockResponse); when(mockStatusLine.getStatusCode()).thenReturn(400); when(mockRestClient.performRequest(any(Request.class))).thenThrow(mockException); client.getMapping("status_s", "active"); } @Test public void testSearchSize() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/_search", "{\"size\":10}"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setSize(10); client.search("status_s", "active", request); } @Test public void testLegacySearch() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/active/_search", "{\"size\":10}"); mockVersion("6.0.0"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setSize(10); client.search("status_s", "active", request); } @Test public void testSearchSizeWithProxyClient() throws IOException { final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search", "{\"size\":10}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setSize(10); proxyClient.search("status_s", "active", request); } @Test public void testSearchFrom() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/_search", "{\"from\":10}"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest 
request = new ElasticRequest(); request.setFrom(10); client.search("status_s", "active", request); } @Test public void testSearchFromWithProxyClient() throws IOException { final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search", "{\"from\":10}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setFrom(10); proxyClient.search("status_s", "active", request); } @Test public void testSearchScroll() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/_search?scroll=10s", "{}"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setScroll(10); client.search("status_s", "active", request); } @Test public void testSearchScrollWithProxyClient() throws IOException { final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search?scroll=10s", "{}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setScroll(10); proxyClient.search("status_s", "active", request); } @Test public void testSearchSourceFiltering() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/_search", "{\"_source\":\"obj1\"}"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.addSourceInclude("obj1"); client.search("status_s", "active", request); } @Test public void testSearchSourceFilteringWithProxyClient() throws IOException { final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search", "{\"_source\":\"obj1\"}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.addSourceInclude("obj1"); proxyClient.search("status_s", 
"active", request); } @Test public void testSearchSourceFiltering2() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/_search", "{\"_source\":[\"obj1\",\"obj2\"]}"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.addSourceInclude("obj1"); request.addSourceInclude("obj2"); client.search("status_s", "active", request); } @Test public void testSearchSourceFilteringWithProxyClient2() throws IOException { final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search", "{\"_source\":[\"obj1\",\"obj2\"]}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.addSourceInclude("obj1"); request.addSourceInclude("obj2"); proxyClient.search("status_s", "active", request); } @Test public void testSearchStoredFields() throws IOException { mockVersion("2.4.4"); final RequestMatcher matcher2 = new RequestMatcher("/status_s/active/_search", "{\"fields\":[\"obj1\"]}"); doReturn(mockResponse).when(mockRestClient).performRequest(argThat(matcher2)); ElasticRequest request = new ElasticRequest(); request.addField("obj1"); client.search("status_s", "active", request); } @Test public void testSearchStoredFieldsWithProxyClient() throws IOException { mockVersion("2.4.4"); final RequestMatcher matcher2 = new RequestMatcher("POST", "/status_s/active/_search", "{\"fields\":[\"obj1\"]}", "runAsTest"); doReturn(mockResponse).when(mockProxyRestClient).performRequest(argThat(matcher2)); ElasticRequest request = new ElasticRequest(); request.addField("obj1"); proxyClient.search("status_s", "active", request); } @Test public void testSearchSort() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/_search", "{\"sort\":[{\"obj1\":{\"order\":\"asc\"}}]}"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); 
ElasticRequest request = new ElasticRequest(); request.addSort("obj1", "asc"); client.search("status_s", "active", request); } @Test public void testSearchSortWithProxyClient() throws IOException { final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search", "{\"sort\":[{\"obj1\":{\"order\":\"asc\"}}]}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.addSort("obj1", "asc"); proxyClient.search("status_s", "active", request); } @Test public void testSearchSort2() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/_search", "{\"sort\":[{\"obj1\":{\"order\":\"asc\"}},{\"obj2\":{\"order\":\"desc\"}}]}"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.addSort("obj1", "asc"); request.addSort("obj2", "desc"); client.search("status_s", "active", request); } @Test public void testSearchSortWithProxyClient2() throws IOException { final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search", "{\"sort\":[{\"obj1\":{\"order\":\"asc\"}},{\"obj2\":{\"order\":\"desc\"}}]}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.addSort("obj1", "asc"); request.addSort("obj2", "desc"); proxyClient.search("status_s", "active", request); } @Test public void testSearchResponse() throws IOException { String content = "{\"hits\": {\"total\": 10, \"max_score\": 0.8, \"hits\": [{\"_index\": \"index_name\"}, {}]}}"; final RequestMatcher matcher = new RequestMatcher("/status_s/_search", "{}"); InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); final ElasticResponse response = 
client.search("status_s", "active", new ElasticRequest()); assertEquals(10, response.getResults().getTotal().intValue()); assertEquals("index_name", response.getHits().get(0).getIndex()); assertEquals(0.8f, response.getResults().getMaxScore(), 1e-9); } @Test public void testSearchResponseWithProxyClient() throws IOException { String content = "{\"hits\": {\"total\": 10, \"max_score\": 0.8, \"hits\": [{\"_index\": \"index_name\"}, {}]}}"; final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search", "{}", "runAsTest"); InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); final ElasticResponse response = proxyClient.search("status_s", "active", new ElasticRequest()); assertEquals(10, response.getResults().getTotal().intValue()); assertEquals("index_name", response.getHits().get(0).getIndex()); assertEquals(0.8f, response.getResults().getMaxScore(), 1e-9); } @Test public void testQuery() throws IOException { final Map query = ImmutableMap.of("term", ImmutableMap.of("obj1", "value1")); final String data = new ObjectMapper().writeValueAsString(ImmutableMap.of("query", query)); final RequestMatcher matcher = new RequestMatcher("/status_s/_search", data); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setQuery(query); client.search("status_s", "active", request); } @Test public void testLegacyQuery() throws IOException { final Map query = ImmutableMap.of("term", ImmutableMap.of("obj1", "value1")); final String data = new ObjectMapper().writeValueAsString(ImmutableMap.of("query", query)); final RequestMatcher matcher = new RequestMatcher("/status_s/active/_search", data); mockVersion("6.0.0"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new 
ElasticRequest(); request.setQuery(query); client.search("status_s", "active", request); } @Test public void testQueryWithProxyClient() throws IOException { final Map query = ImmutableMap.of("term", ImmutableMap.of("obj1", "value1")); final String data = new ObjectMapper().writeValueAsString(ImmutableMap.of("query", query)); final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search", data, "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setQuery(query); proxyClient.search("status_s", "active", request); } @Test public void testLegacyQueryWithProxyClient() throws IOException { final Map query = ImmutableMap.of("term", ImmutableMap.of("obj1", "value1")); final String data = new ObjectMapper().writeValueAsString(ImmutableMap.of("query", query)); final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/active/_search", data, "runAsTest"); mockVersion("6.0.0"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setQuery(query); proxyClient.search("status_s", "active", request); } @Test public void testAggregation() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/_search", "{\"aggregations\":{\"ageohash_grid_agg\":{\"geohash_grid\": {\"field\":\"a_field\",\"precision\":1}}}}"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setAggregations(ImmutableMap.of("ageohash_grid_agg", ImmutableMap.of("geohash_grid", ImmutableMap.of("field","a_field","precision",1)))); client.search("status_s", "active", request); } @Test public void testLegacyAggregation() throws IOException { final RequestMatcher matcher = new RequestMatcher("/status_s/active/_search", "{\"aggregations\":{\"ageohash_grid_agg\":{\"geohash_grid\": 
{\"field\":\"a_field\",\"precision\":1}}}}"); mockVersion("6.0.0"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setAggregations(ImmutableMap.of("ageohash_grid_agg", ImmutableMap.of("geohash_grid", ImmutableMap.of("field","a_field","precision",1)))); client.search("status_s", "active", request); } @Test public void testAggregationWithProxyClient() throws IOException { final RequestMatcher matcher = new RequestMatcher("POST", "/status_s/_search", "{\"aggregations\":{\"ageohash_grid_agg\":{\"geohash_grid\": {\"field\":\"a_field\",\"precision\":1}}}}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); ElasticRequest request = new ElasticRequest(); request.setAggregations(ImmutableMap.of("ageohash_grid_agg", ImmutableMap.of("geohash_grid", ImmutableMap.of("field","a_field","precision",1)))); proxyClient.search("status_s", "active", request); } @Test(expected=IOException.class) public void testBadStatus() throws IOException { when(mockStatusLine.getStatusCode()).thenReturn(404); when(mockRestClient.performRequest(any(Request.class))).thenReturn(mockResponse); client.search("status_s", "active", new ElasticRequest()); } @Test public void testNextScroll() throws IOException { final RequestMatcher matcher = new RequestMatcher("/_search/scroll", "{\"scroll_id\":\"id1\",\"scroll\":\"10s\"}"); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); client.scroll("id1", 10); } @Test public void testNextScrollWithProxyClient() throws IOException { final RequestMatcher matcher = new RequestMatcher("POST", "/_search/scroll", "{\"scroll_id\":\"id1\",\"scroll\":\"10s\"}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); proxyClient.scroll("id1", 10); } @Test public void testClearScroll() throws IOException { final RequestMatcher matcher = new RequestMatcher("DELETE", 
"/_search/scroll", "{\"scroll_id\":[\"id1\"]}", null); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); client.clearScroll(ImmutableSet.of("id1")); } @Test public void testClearScrollWithProxyClient() throws IOException { final RequestMatcher matcher = new RequestMatcher("DELETE", "/_search/scroll", "{\"scroll_id\":[\"id1\"]}", "runAsTest"); when(mockProxyRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); proxyClient.clearScroll(ImmutableSet.of("id1")); } @Test public void testClose() throws IOException { client.close(); verify(mockRestClient).close(); } @Test public void testCloseWithProxyClient() throws IOException { proxyClient.close(); verify(mockRestClient).close(); verify(mockProxyRestClient).close(); } @Test public void testRemoveMapping() { Map data = createMap("key", "value"); RestElasticClient.removeMapping(null, "key", data, null); assertTrue(data.isEmpty()); data = createMap("key", "value", "parent", ImmutableMap.of("key", "value")); RestElasticClient.removeMapping(null, "key", data, null); assertEquals(data, ImmutableMap.of("parent", ImmutableMap.of("key", "value"))); data = ImmutableMap.of("key", "value", "parent", createMap("key", 10)); RestElasticClient.removeMapping("parent", "key", data, null); assertEquals(data, ImmutableMap.of("key", "value", "parent", new HashMap<>())); data = new HashMap<>(); RestElasticClient.removeMapping(null, "key", data, null); assertTrue(data.isEmpty()); data = ImmutableMap.of("parent", new HashMap<>()); RestElasticClient.removeMapping("parent", "key", data, null); assertEquals(data, ImmutableMap.of("parent", new HashMap<>())); data = createMap("key", ImmutableList.of(ImmutableMap.of("key", "value"), ImmutableMap.of("key","value","parent", createMap("key","value","key2","value2")))); RestElasticClient.removeMapping("parent", "key", data, null); assertEquals(data, createMap("key", ImmutableList.of(ImmutableMap.of("key", "value"), ImmutableMap.of("key","value","parent", 
createMap("key2","value2"))))); } @Test(expected=IllegalStateException.class) public void testMissingAuth() throws IOException { when(mockStx.getAuthentication()).thenReturn(null); proxyClient.search("status_s", "active", new ElasticRequest()); } @Test(expected=IllegalStateException.class) public void testUnauthenticated() throws IOException { when(mockAuth.isAuthenticated()).thenReturn(false); proxyClient.search("status_s", "active", new ElasticRequest()); } private void mockVersion(String version) throws IOException { final Response mockResponse = mock(Response.class); final HttpEntity mockEntity = mock(HttpEntity.class); final StatusLine mockStatusLine = mock(StatusLine.class); when(mockResponse.getEntity()).thenReturn(mockEntity); when(mockResponse.getStatusLine()).thenReturn(mockStatusLine); when(mockStatusLine.getStatusCode()).thenReturn(200); String content = "{\"version\":{\"number\":\"" + version + "\"}}"; InputStream inputStream = new ByteArrayInputStream(content.getBytes()); when(mockEntity.getContent()).thenReturn(inputStream); final RequestMatcher matcher = new RequestMatcher("GET", "/", null, null); when(mockRestClient.performRequest(argThat(matcher))).thenReturn(mockResponse); } private Map createMap(Object... 
params) { Map data = new HashMap<>(); for (int i=0; i { private static final String RUN_AS_HEADER_FIELD = "es-security-runas-user"; private final String runAsUser; RunAsHeaderMatcher(String runAsUser) { this.runAsUser = runAsUser; } @Override public boolean matches(BasicHeader header) { return RUN_AS_HEADER_FIELD.equals(header.getName()) && runAsUser.equals(header.getValue()); } } class JsonByteArrayEntityMatcher implements ArgumentMatcher { private Map data; JsonByteArrayEntityMatcher(byte[] data) throws IOException { ObjectMapper mapper = new ObjectMapper(); this.data = mapper.readValue(data, new TypeReference>() {}); } @Override public boolean matches(ByteArrayEntity argument) { ByteArrayInputStream inputStream = (ByteArrayInputStream) argument.getContent(); ObjectMapper mapper = new ObjectMapper(); try { Map data = mapper.readValue(inputStream, new TypeReference>() {}); return this.data.equals(data); } catch (IOException e) { return false; } } } class RequestMatcher implements ArgumentMatcher { private final String method; private final String endpoint; private JsonByteArrayEntityMatcher entityMatcher; private RunAsHeaderMatcher headerMatcher; RequestMatcher(String endpoint, String data) throws IOException { this("POST", endpoint, data, null); } RequestMatcher(String method, String endpoint, String data, String user) throws IOException { this.method = method; this.endpoint = endpoint; this.entityMatcher = data != null ? new JsonByteArrayEntityMatcher(data.getBytes()) : null; this.headerMatcher = user != null ? new RunAsHeaderMatcher(user) : null; } @Override public boolean matches(Request argument) { if (argument == null) { return false; } final List
    headers = argument.getOptions().getHeaders(); if (!this.method.equals(argument.getMethod())) { return false; } if (!this.endpoint.equals(argument.getEndpoint())) { return false; } if (this.entityMatcher == null && argument.getEntity() != null) { return false; } if (this.entityMatcher != null && !this.entityMatcher.matches((ByteArrayEntity) argument.getEntity())) { return false; } if (this.headerMatcher != null && headers.isEmpty()) { return false; } if (this.headerMatcher == null && !headers.isEmpty()) { return false; } return this.headerMatcher == null || headerMatcher.matches((BasicHeader) headers.get(0)); } } } ================================================ FILE: gt-elasticsearch/src/test/resources/README.md ================================================ JSON data files are based on src/test/resources/wifiAccessPoint.xml from the GeoTools SOLR module: GeoTools - The Open Source Java GIS Toolkit http://geotools.org (C) 2014, Open Source Geospatial Foundation (OSGeo) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; version 2.1 of the License. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. 
================================================ FILE: gt-elasticsearch/src/test/resources/active_mappings.json ================================================ { "properties": { "geo": { "type": "geo_point" }, "geo2": { "type": "geo_shape", "tree": "quadtree" }, "geo3": { "type": "geo_shape", "tree": "quadtree" }, "geo4": { "properties": { "coordinates": { "type": "geo_point" } } }, "geo5": { "type": "geo_point" }, "id": { "type": "keyword" }, "installed_tdt": { "type": "date", "format": "dateOptionalTime" }, "installed_td": { "type": "date" }, "modem_b": { "type": "boolean", "store": "true" }, "security_ss": { "type": "keyword", "store": "true" }, "speed_is": { "type": "integer", "store": "true" }, "standard_ss": { "type": "keyword" }, "status_s": { "type": "keyword" }, "vendor_s": { "type": "keyword" }, "object": { "properties": { "hejda": { "type": "double" } } }, "nested": { "type": "nested", "properties": { "hej": { "type": "double" }, "parent": { "type": "nested", "properties": { "child": { "type": "keyword" } } } } }, "long": { "type": "long" } } } ================================================ FILE: gt-elasticsearch/src/test/resources/active_mappings_legacy.json ================================================ { "properties": { "geo": { "type": "geo_point" }, "geo2": { "type": "geo_shape", "tree": "quadtree" }, "geo3": { "type": "geo_shape", "tree": "quadtree" }, "geo4": { "properties": { "coordinates": { "type": "geo_point" } } }, "geo5": { "type": "geo_point" }, "id": { "type": "string", "index": "not_analyzed" }, "installed_tdt": { "type": "date", "format": "dateOptionalTime" }, "installed_td": { "type": "date" }, "modem_b": { "type": "boolean", "store": "true" }, "security_ss": { "type": "string", "index": "not_analyzed", "store": "true" }, "speed_is": { "type": "integer", "store": "true" }, "standard_ss": { "type": "string", "index": "not_analyzed" }, "status_s": { "type": "string", "index": "not_analyzed" }, "vendor_s": { "type": "string", "index": 
"not_analyzed" }, "object": { "properties": { "hejda": { "type": "double" } } }, "nested": { "type": "nested", "properties": { "hej": { "type": "double" }, "parent": { "type": "nested", "properties": { "child": { "type": "string", "index": "not_analyzed" } } } } }, "long": { "type": "long" } } } ================================================ FILE: gt-elasticsearch/src/test/resources/active_mappings_ng.json ================================================ { "properties": { "geo": { "type": "geo_point" }, "geo2": { "type": "geo_shape", "tree": "quadtree" }, "geo3": { "type": "geo_shape", "tree": "quadtree" }, "geo4": { "properties": { "coordinates": { "type": "geo_point" } } }, "geo5": { "type": "geo_point" }, "geo6": { "type": "geo_shape", "tree": "quadtree" }, "id": { "type": "keyword" }, "installed_tdt": { "type": "date", "format": "dateOptionalTime" }, "installed_td": { "type": "date" }, "modem_b": { "type": "boolean", "store": "true" }, "security_ss": { "type": "keyword", "store": "true" }, "speed_is": { "type": "integer", "store": "true" }, "standard_ss": { "type": "keyword" }, "status_s": { "type": "keyword" }, "vendor_s": { "type": "keyword" }, "object": { "properties": { "hejda": { "type": "double" } } }, "nested": { "type": "nested", "properties": { "hej": { "type": "double" }, "parent": { "type": "nested", "properties": { "child": { "type": "keyword" } } } } }, "long": { "type": "long" } } } ================================================ FILE: gt-elasticsearch/src/test/resources/log4j.properties ================================================ log4j.rootLogger=OFF,A1 log4j.appender.A1=org.apache.log4j.ConsoleAppender log4j.appender.A1.layout=org.apache.log4j.PatternLayout log4j.appender.A1.layout.ConversionPattern=%d [%t] %-5p %c - %m%n #log4j.logger.org.springframework=WARN,A1 #log4j.additivity.org.springframework=false ================================================ FILE: gt-elasticsearch/src/test/resources/logging.properties 
================================================ ############################################################ # Global properties ############################################################ # "handlers" specifies a comma separated list of log Handler # classes. These handlers will be installed during VM startup. # Note that these classes must be on the system classpath. # By default we only configure a ConsoleHandler, which will only # show messages at the INFO and above levels. handlers= java.util.logging.ConsoleHandler # Default global logging level. # This specifies which kinds of events are logged across # all loggers. For any given facility this global level # can be overriden by a facility specific level # Note that the ConsoleHandler also has a separate level # setting to limit messages printed to the console. .level= SEVERE ############################################################ # Handler specific properties. # Describes specific configuration info for Handlers. ############################################################ # Limit the message that are printed on the console to INFO and above. java.util.logging.ConsoleHandler.level = SEVERE java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter # Example to customize the SimpleFormatter output format # to print one-line log message like this: # : [] # # java.util.logging.SimpleFormatter.format=%4$s: %5$s [%1$tc]%n ############################################################ # Facility specific properties. # Provides extra control for each logger. 
############################################################ mil.nga.giat.data.elasticsearch.level = FINEST ================================================ FILE: gt-elasticsearch/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker ================================================ mock-maker-inline ================================================ FILE: gt-elasticsearch/src/test/resources/requirements.txt ================================================ requests ================================================ FILE: gt-elasticsearch/src/test/resources/test_index.py ================================================ """ Script creates status_s index for testing. """ import argparse import json import os import requests script_dir = os.path.dirname(os.path.realpath(__file__)) parser = argparse.ArgumentParser() parser.add_argument('--attributes', nargs='+', default=[]) args = parser.parse_args() index_name = 'status_s' f_mappings = 'active_mappings.json' f_docs = 'wifiAccessPoint.json' elastic_url = 'http://localhost:9200' auth = ('elastic', 'changeme') version = float(json.loads(requests.get('http://localhost:9200', auth=auth).text)['version']['number'].split('.')[0]) type_name = 'active' if version < 7 else '_doc' requests.delete(f'{elastic_url}/{index_name}', auth=auth) requests.put(f'{elastic_url}/{index_name}', auth=auth) with open(f'{script_dir}/{f_mappings}') as f: mappings = json.loads(f.read()) if args.attributes: [mappings['properties'].pop(key) for key in list(mappings['properties'].keys()) if key not in args.attributes] requests.put(f'{elastic_url}/{index_name}/_mapping/{type_name if version < 7 else ""}', json=mappings, auth=auth) with open(f'{script_dir}/{f_docs}') as f: features = json.load(f)['features'] for item in [item for item in features if item['status_s'] == 'active']: if args.attributes: [item.pop(key) for key in list(item.keys()) if key not in args.attributes] requests.put(f'{elastic_url}/{index_name}/{type_name}/{item["id"]}', 
json=item, auth=auth) requests.put(f'{elastic_url}//_xpack/security/role/status_admin', json={'indices': [{'privileges': ['all'], 'names': ['status*']}]}, auth=auth) requests.put(f'{elastic_url}/_xpack/security/user/admin', json={'password': 'statusadmin', 'roles': ['status_admin']}, auth=auth) ================================================ FILE: gt-elasticsearch/src/test/resources/wifiAccessPoint.json ================================================ { "features": [ { "geo": "0,0", "geo2": { "coordinates": [[0,0],[0,1]], "type": "linestring" }, "geo3": { "coordinates": [[0,1],[1,0]], "type": "envelope" }, "geo5": [ 0, 0 ], "id": "01", "installed_td": 1305912798000, "installed_tdt": "2011-05-20T17:33:18Z", "modem_b": true, "nested": { "hej": 10.8, "parent": { "child": "ab" } }, "object": { "hejda": 12.8 }, "security_ss": [ "WEP", "WPA" ], "speed_is": 150, "standard_ss": [ "IEEE 802.11b", "IEEE 802.11n" ], "status_s": "active", "vendor_s": "D-Link" }, { "geo": "15,10", "geo2": { "coordinates": [[10,15],[10,16]], "type": "linestring" }, "geo3": { "coordinates": [[10,16],[16,11]], "type": "envelope" }, "geo5": [ 10, 15 ], "id": "02", "installed_td": 1305912798000, "installed_tdt": "2012-01-10T07:43:58Z", "modem_b": true, "nested": { "hej": 16.9, "parent": { "child": "bd" } }, "object": { "hejda": 19.9 }, "security_ss": "WEP", "speed_is": [ 150, 300 ], "standard_ss": "IEEE 802.11b", "status_s": "active", "vendor_s": "Linksys" }, { "geo": "7,5", "geo2": { "coordinates": [[5,7],[5,8]], "type": "linestring" }, "geo3": { "coordinates": [[5,8],[8,5]], "type": "envelope" }, "geo5": [ 5, 7 ], "id": "03", "installed_td": 1305912798000, "installed_tdt": "2013-10-01T00:13:11Z", "modem_b": true, "nested": { "hej": 7.9, "parent": { "child": "bc" } }, "object": { "hejda": 16.3 }, "security_ss": "WEP", "speed_is": 150, "standard_ss": "IEEE 802.11n", "status_s": "active", "vendor_s": "D-Link" }, { "geo": "2,24", "geo2": { "coordinates": [[24,2],[24,3]], "type": "linestring" }, 
"geo3": { "coordinates": [[22,3],[24,2]], "type": "envelope" }, "geo5": [ 24, 2 ], "id": "04", "installed_td": 1305912798000, "installed_tdt": "2013-12-01T00:16:21Z", "modem_b": false, "nested": { "hej": 13.8, "parent": { "child": "cb" } }, "object": { "hejda": 18.8 }, "security_ss": [ "WEP", "WPA" ], "speed_is": 300, "standard_ss": [ "IEEE 802.11b", "IEEE 802.11n" ], "status_s": "active", "vendor_s": "Cisco" }, { "geo": "17,12", "geo2": { "coordinates": [[12,17],[12,18]], "type": "linestring" }, "geo3": { "coordinates": [[12,18],[13,17]], "type": "envelope" }, "geo5": [ 12, 17 ], "id": "05", "installed_td": 1305912798000, "installed_tdt": "2000-11-11T11:16:21Z", "modem_b": true, "nested": { "hej": 7.0, "parent": { "child": "aa" } }, "object": { "hejda": 7.9 }, "security_ss": "WPA", "speed_is": 150, "standard_ss": "IEEE 802.11n", "status_s": "active", "vendor_s": "Cisco" }, { "geo": "9,13", "geo2": { "coordinates": [[13,9],[13,10]], "type": "linestring" }, "geo3": { "coordinates": [[13,10],[14,9]], "type": "envelope" }, "geo5": [ 13, 9 ], "id": "06", "installed_td": 1305912798000, "installed_tdt": "2001-04-28T23:23:21Z", "modem_b": false, "nested": { "hej": 10.9, "parent": { "child": "bb" } }, "object": { "hejda": 8.0 }, "security_ss": "WPA", "speed_is": 300, "standard_ss": [ "IEEE 802.11b", "IEEE 802.11n" ], "status_s": "active", "vendor_s": "D-Link" }, { "geo": "27,22", "geo2": { "coordinates": [[22,27],[22,28]], "type": "linestring" }, "geo3": { "coordinates": [[22,28],[23,27]], "type": "envelope" }, "geo5": [ 22, 27 ], "id": "07", "installed_td": 10, "installed_tdt": "2004-06-20T03:44:56Z", "modem_b": false, "nested": { "hej": 12.0, "parent": { "child": "ba" } }, "object": { "hejda": 4.1 }, "security_ss": "WPA", "speed_is": 150, "standard_ss": "IEEE 802.11n", "status_s": "active", "vendor_s": "TP-Link" }, { "geo": "3,2", "geo2": { "coordinates": [[2,3],[2,4]], "type": "linestring" }, "geo3": { "coordinates": [[2,4],[3,3]], "type": "envelope" }, "geo5": [ 2, 3 
], "id": "08", "installed_td": 1305912798000, "installed_tdt": "2014-02-12T06:37:48Z", "modem_b": true, "nested": { "hej": 12.2, "parent": { "child": "ca" } }, "object": { "hejda": 0.4 }, "security_ss": "WEP", "speed_is": 150, "standard_ss": "IEEE 802.11b", "status_s": "active", "vendor_s": "Linksys" }, { "geo": "23,7", "geo2": { "coordinates": [[7,23],[7,24]], "type": "linestring" }, "geo3": { "coordinates": [[7,24],[8,23]], "type": "envelope" }, "geo5": [ 7, 23 ], "id": "09", "installed_td": 10, "installed_tdt": "2008-09-23T16:43:23Z", "modem_b": true, "nested": { "hej": 12.4, "parent": { "child": "ce" } }, "object": { "hejda": 0.5 }, "security_ss": "WEP", "speed_is": [ 150, 300 ], "standard_ss": "IEEE 802.11n", "status_s": "active", "vendor_s": "D-Link" }, { "geo": "0,0", "geo2": { "coordinates": [[0,0],[0,1]], "type": "linestring" }, "geo3": { "coordinates": [[0,1],[1,0]], "type": "envelope" }, "geo5": [ 0, 0 ], "id": "10", "installed_td": 10, "modem_b": true, "nested": { "hej": 6.3, "parent": { "child": "bd" } }, "object": { "hejda": 14.8 }, "security_ss": "WPA", "speed_is": [ 150, 300 ], "standard_ss": [ "IEEE 802.11b", "IEEE 802.11n" ], "status_s": "active", "vendor_s": "HP" }, { "geo": "43,1", "geo2": { "coordinates": [[1,43],[1,44]], "type": "linestring" }, "geo3": { "coordinates": [[1,44],[2,43]], "type": "envelope" }, "geo5": [ 1, 43 ], "id": "11", "installed_td": 10, "modem_b": true, "nested": { "hej": 11.8, "parent": { "child": "cd" } }, "object": { "hejda": 13.6 }, "speed_is": 150, "standard_ss": "IEEE 802.11b", "status_s": "active", "vendor_s": "Asus" }, { "geo3": { "coordinates": [[[1,1],[5,1],[5,5],[1,5],[1,1]]], "type": "polygon" }, "geo6": "POLYGON ((1 1, 5 1, 5 5, 1 5, 1 1))", "id": "12", "installed_td": 10, "modem_b": true, "security_ss": "WPA", "speed_is": 300, "standard_ss": [ "IEEE 802.11b", "IEEE 802.11n" ], "status_s": "not-active", "vendor_s": "Asus" }, { "geo3": { "coordinates": [[[3,2],[6,2],[6,7],[3,7],[3,2]]], "type": "polygon" }, 
"geo4": { "coordinates": [3.0,2.5], "type": "POINT" }, "geo6": "POLYGON ((3 2, 6 2, 6 7, 3 7, 3 2))", "id": "13", "installed_td": 10, "modem_b": true, "speed_is": 300, "standard_ss": [ "IEEE 802.11b", "IEEE 802.11n" ], "status_s": "not-active", "vendor_s": "Asus" }, { "geo": "45,-179", "geo2": { "coordinates": [[-179,45],[-179,46]], "type": "linestring" }, "id": "100", "status_s": "not-active" }, { "geo": "45,179", "geo2": { "coordinates": [[179,45],[179,46]], "type": "linestring" }, "id": "101", "status_s": "not-active" } ] } ================================================ FILE: gt-elasticsearch-process/LGPL ================================================ GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. 
Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. 
Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. 
For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. 
For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. 
d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. 
However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. 
If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. 
For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. 
You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. 
It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. 
If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Libraries If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. 
You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. , 1 April 1990 Ty Coon, President of Vice That's all there is to it! ================================================ FILE: gt-elasticsearch-process/LICENSE ================================================ This module is licensed under the terms of the GNU Lesser General Public License (LGPL), version 2 or later. The directory containing this file should also contain a copy of the LGPL, as a file named LGPL. 
This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. ================================================ FILE: gt-elasticsearch-process/pom.xml ================================================ 4.0.0 elasticgeo mil.nga.giat 2.16-SNAPSHOT gt-elasticsearch-process 2.16-SNAPSHOT jar GeoTools Elasticsearch Processes com.fasterxml.jackson.core jackson-core ${jackson.version} com.fasterxml.jackson.core jackson-databind ${jackson.version} com.github.davidmoten geo 0.7.4 org.geotools gt-process ${geotools.version} provided org.geoserver.extension gs-wps-core ${geoserver.version} provided org.geotools gt-grid ${geotools.version} provided org.geotools gt-process-feature ${geotools.version} provided org.geotools gt-sample-data ${geotools.version} test org.geotools gt-property ${geotools.version} test org.geotools gt-epsg-hsql ${geotools.version} test junit junit 4.11 test ================================================ FILE: gt-elasticsearch-process/src/main/java/mil/nga/giat/process/elasticsearch/BBOXRemovingFilterVisitor.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.process.elasticsearch; import org.geotools.filter.visitor.DuplicatingFilterVisitor; import org.opengis.filter.Filter; import org.opengis.filter.spatial.BBOX; class BBOXRemovingFilterVisitor extends DuplicatingFilterVisitor { private String geometryPropertyName; @Override public Object visit(BBOX filter, Object extraData) { geometryPropertyName = filter.getExpression1().toString(); return Filter.INCLUDE; } public String getGeometryPropertyName() { return geometryPropertyName; } } ================================================ FILE: gt-elasticsearch-process/src/main/java/mil/nga/giat/process/elasticsearch/BasicGeoHashGrid.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. */ package mil.nga.giat.process.elasticsearch; import java.util.Map; public class BasicGeoHashGrid extends GeoHashGrid { @Override public Number computeCellValue(Map bucket) { return super.pluckDocCount(bucket); } } ================================================ FILE: gt-elasticsearch-process/src/main/java/mil/nga/giat/process/elasticsearch/GeoHashGrid.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.process.elasticsearch; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.logging.Logger; import org.geotools.coverage.CoverageFactoryFinder; import org.geotools.coverage.grid.GridCoverage2D; import org.geotools.coverage.grid.GridCoverageFactory; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.referencing.crs.DefaultGeographicCRS; import org.geotools.util.factory.GeoTools; import org.geotools.util.logging.Logging; import org.locationtech.jts.geom.Envelope; import org.opengis.feature.simple.SimpleFeature; import org.opengis.referencing.FactoryException; import org.opengis.referencing.operation.TransformException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.github.davidmoten.geo.GeoHash; import com.github.davidmoten.geo.LatLong; abstract class GeoHashGrid { private final static Logger LOGGER = Logging.getLogger(GeoHashGrid.class); private static final int DEFAULT_PRECISION = 2; public static final String BUCKET_NAME_KEY = "key"; public static final String BUCKETS_KEY = "buckets"; public static final String DOC_COUNT_KEY = "doc_count"; public static final String VALUE_KEY = "value"; private double cellWidth; private double cellHeight; private double lonOffset; private Envelope envelope; private ReferencedEnvelope boundingBox; private float emptyCellValue; private float[][] grid; private RasterScale scale; GeoHashGrid() { this.emptyCellValue = 0; this.scale = new RasterScale(); } public void initalize(ReferencedEnvelope srcEnvelope, SimpleFeatureCollection features) throws TransformException, FactoryException { final List> buckets = readFeatures(features); final String firstGeohash = buckets.isEmpty() ? 
null : (String) buckets.get(0).get("key"); final int precision; if (!isValid(firstGeohash)) { LOGGER.fine("No aggregations found or missing/invalid geohash key"); precision = DEFAULT_PRECISION; } else { precision = ((String) buckets.get(0).get("key")).length(); } cellWidth = GeoHash.widthDegrees(precision); cellHeight = GeoHash.heightDegrees(precision); if (srcEnvelope.getCoordinateReferenceSystem() != null) { srcEnvelope = srcEnvelope.transform(DefaultGeographicCRS.WGS84,false); } computeMinLonOffset(srcEnvelope); envelope = computeEnvelope(srcEnvelope, precision); boundingBox = new ReferencedEnvelope(envelope.getMinX()-cellWidth/2.0, envelope.getMaxX()+cellWidth/2.0, envelope.getMinY()-cellHeight/2.0, envelope.getMaxY()+cellHeight/2.0, DefaultGeographicCRS.WGS84); final int numCol = (int) Math.round((envelope.getMaxX()-envelope.getMinX())/cellWidth+1); final int numRow = (int) Math.round((envelope.getMaxY()-envelope.getMinY())/cellHeight+1); grid = new float[numRow][numCol]; LOGGER.fine("Created grid with size (" + numCol + ", " + numRow + ")"); if (emptyCellValue != 0) { for (float[] row: grid) Arrays.fill(row, emptyCellValue); } List cells = new ArrayList<>(); buckets.forEach(bucket -> { Number rasterValue = computeCellValue(bucket); cells.add(new GridCell((String) bucket.get("key"), rasterValue)); scale.prepareScale(rasterValue.floatValue()); }); cells.forEach(cell -> updateGrid(cell.getGeohash(), cell.getValue())); LOGGER.fine("Read " + cells.size() + " aggregation buckets"); } protected abstract Number computeCellValue(Map bucket); private void updateGrid(String geohash, Number value) { if (geohash != null && value != null) { final LatLong latLon = GeoHash.decodeHash(geohash); final double lat = latLon.getLat(); double lon = latLon.getLon() + lonOffset; if (isValid(lat, lon-360)) updateGrid(lat, lon-360, value); if (isValid(lat, lon)) updateGrid(lat, lon, value); while (isValid(lat, lon+=360)) { updateGrid(lat, lon, value); } } } private void 
updateGrid(double lat, double lon, Number value) { final int row = grid.length-(int) Math.round((lat-envelope.getMinY())/cellHeight)-1; final int col = (int) Math.round((lon-envelope.getMinX())/cellWidth); grid[Math.min(row,grid.length-1)][Math.min(col,grid[0].length-1)] = scale.scaleValue(value.floatValue()); } public GridCoverage2D toGridCoverage2D() { final GridCoverageFactory coverageFactory = CoverageFactoryFinder.getGridCoverageFactory(GeoTools.getDefaultHints()); return coverageFactory.create("geohashGridAgg", grid, boundingBox); } private List> readFeatures(SimpleFeatureCollection features) { final ObjectMapper mapper = new ObjectMapper(); final List> buckets = new ArrayList<>(); try (SimpleFeatureIterator iterator = features.features()) { while (iterator.hasNext()) { final SimpleFeature feature = iterator.next(); if (feature.getAttribute("_aggregation") != null) { final byte[] data = (byte[]) feature.getAttribute("_aggregation"); try { final Map aggregation = mapper.readValue(data, new TypeReference>() {}); buckets.add(aggregation); } catch (IOException e) { LOGGER.fine("Failed to parse aggregation value: " + e); } } } } return buckets; } private Envelope computeEnvelope(ReferencedEnvelope outEnvelope, int precision) { final String minHash = GeoHash.encodeHash(Math.max(-90,outEnvelope.getMinY()), outEnvelope.getMinX(), precision); final LatLong minLatLon = GeoHash.decodeHash(minHash); final double minLon = minLatLon.getLon() + lonOffset; final double width = Math.ceil(outEnvelope.getWidth()/cellWidth)*cellWidth; final double maxLon = minLon + width - cellWidth; final String maxHash = GeoHash.encodeHash(Math.min(90, outEnvelope.getMaxY()), maxLon, precision); final LatLong maxLatLon = GeoHash.decodeHash(maxHash); return new Envelope(minLon, maxLon, minLatLon.getLat(), maxLatLon.getLat()); } private void computeMinLonOffset(ReferencedEnvelope env) { double minLon; if (env.getMinX() > 180) { minLon = env.getMinX() % 360; } else if (env.getMinX() < -180) { 
minLon = 360 - Math.abs(env.getMinX()) % 360; } else { minLon = env.getMinX() % 360; } if (minLon > 180) { minLon -= 360; } lonOffset = env.getMinX() - minLon; } private boolean isValid(final double lat, final double lon) { return lon>=envelope.getMinX() && lon<=envelope.getMaxX() && lat>=envelope.getMinY() && lat<=envelope.getMaxY(); } private boolean isValid(String geohash) { return geohash != null && GeoHash.encodeHash(GeoHash.decodeHash(geohash), geohash.length()).equals(geohash); } String pluckBucketName(Map bucket) { if (!bucket.containsKey(BUCKET_NAME_KEY)) { LOGGER.warning("Unable to pluck key, bucket does not contain required field:" + BUCKET_NAME_KEY); throw new IllegalArgumentException(); } return bucket.get(BUCKET_NAME_KEY) + ""; } Number pluckDocCount(Map bucket) { if (!bucket.containsKey(DOC_COUNT_KEY)) { LOGGER.warning("Unable to pluck document count, bucket does not contain required key:" + DOC_COUNT_KEY); throw new IllegalArgumentException(); } return (Number) bucket.get(DOC_COUNT_KEY); } Number pluckMetricValue(Map bucket, String metricKey, String valueKey) { Number value; if (null == metricKey || metricKey.trim().length() == 0) { value = pluckDocCount(bucket); } else { if (!bucket.containsKey(metricKey)) { LOGGER.warning("Unable to pluck metric, bucket does not contain required key:" + metricKey); throw new IllegalArgumentException(); } @SuppressWarnings("unchecked") Map metric = (Map) bucket.get(metricKey); if (!metric.containsKey(valueKey)) { LOGGER.warning("Unable to pluck value, metric does not contain required key:" + valueKey); throw new IllegalArgumentException(); } value = (Number) metric.get(valueKey); } return value; } @SuppressWarnings("unchecked") List> pluckAggBuckets(Map parentBucket, String aggKey) { if (!parentBucket.containsKey(aggKey)) { LOGGER.warning("Unable to pluck aggregation results, parent bucket does not contain required key:" + aggKey); throw new IllegalArgumentException(); } Map aggResults = (Map) 
parentBucket.get(aggKey); if (!aggResults.containsKey(BUCKETS_KEY)) { LOGGER.warning("Unable to pluck buckets, aggregation results bucket does not contain required key:" + BUCKETS_KEY); throw new IllegalArgumentException(); } return (List>) aggResults.get(BUCKETS_KEY); } public void setParams(List params) { //ignore params } public void setEmptyCellValue(Float value) { if (null != value) { this.emptyCellValue = value; } } public double getCellWidth() { return cellWidth; } public double getCellHeight() { return cellHeight; } public Envelope getEnvelope() { return envelope; } public ReferencedEnvelope getBoundingBox() { return boundingBox; } public float[][] getGrid() { return grid; } public void setScale(RasterScale scale) { this.scale = scale; } } ================================================ FILE: gt-elasticsearch-process/src/main/java/mil/nga/giat/process/elasticsearch/GeoHashGridProcess.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/ package mil.nga.giat.process.elasticsearch; import java.util.ArrayList; import java.util.List; import org.geotools.coverage.grid.GridCoverage2D; import org.geotools.coverage.processing.Operations; import org.geotools.data.Query; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.factory.CommonFactoryFinder; import org.geotools.filter.visitor.SimplifyingFilterVisitor; import org.geotools.geometry.jts.ReferencedEnvelope; import org.geotools.process.ProcessException; import org.geotools.process.factory.DescribeParameter; import org.geotools.process.factory.DescribeProcess; import org.geotools.process.factory.DescribeResult; import org.geotools.process.vector.VectorProcess; import org.geotools.referencing.crs.DefaultGeographicCRS; import org.opengis.coverage.grid.GridGeometry; import org.opengis.filter.Filter; import org.opengis.filter.FilterFactory; import org.opengis.filter.expression.PropertyName; import org.opengis.filter.spatial.BBOX; import org.opengis.util.ProgressListener; @SuppressWarnings("unused") @DescribeProcess(title = "geoHashGridAgg", description = "Computes a grid from GeoHash grid aggregation buckets with values corresponding to doc_count values.") public class GeoHashGridProcess implements VectorProcess { private final static FilterFactory FILTER_FACTORY = CommonFactoryFinder.getFilterFactory(null); public enum Strategy { BASIC(BasicGeoHashGrid.class), METRIC(MetricGeoHashGrid.class), NESTED_AGG(NestedAggGeoHashGrid.class); private final Class clazz; Strategy(Class clazz) { this.clazz = clazz; } GeoHashGrid createNewInstance() throws ReflectiveOperationException { return clazz.getConstructor().newInstance(); } } @DescribeResult(description = "Output raster") public GridCoverage2D execute( // process data @DescribeParameter(name = "data", description = "Input features") SimpleFeatureCollection obsFeatures, // process parameters @DescribeParameter(name = "pixelsPerCell", description = "Resolution used for upsampling (in 
pixels)", defaultValue="1", min = 1) Integer argPixelsPerCell, @DescribeParameter(name = "gridStrategy", description = "GeoHash grid strategy", defaultValue="Basic", min = 1) String gridStrategy, @DescribeParameter(name = "gridStrategyArgs", description = "Grid strategy arguments", min = 0) List gridStrategyArgs, @DescribeParameter(name = "emptyCellValue", description = "Default cell value", min = 0) Float emptyCellValue, @DescribeParameter(name = "scaleMin", description = "Scale minimum", defaultValue="0") Float scaleMin, @DescribeParameter(name = "scaleMax", description = "Scale maximum", min = 0) Float scaleMax, @DescribeParameter(name = "useLog", description = "Whether to use log values (default=false)", defaultValue="false") Boolean useLog, // output image parameters @DescribeParameter(name = "outputBBOX", description = "Bounding box of the output") ReferencedEnvelope argOutputEnv, @DescribeParameter(name = "outputWidth", description = "Width of output raster in pixels") Integer argOutputWidth, @DescribeParameter(name = "outputHeight", description = "Height of output raster in pixels") Integer argOutputHeight, ProgressListener monitor) throws ProcessException { try { // construct and populate grid final GeoHashGrid geoHashGrid = Strategy.valueOf(gridStrategy.toUpperCase()).createNewInstance(); geoHashGrid.setParams(gridStrategyArgs); geoHashGrid.setEmptyCellValue(emptyCellValue); geoHashGrid.setScale(new RasterScale(scaleMin, scaleMax, useLog)); geoHashGrid.initalize(argOutputEnv, obsFeatures); // convert to grid coverage final GridCoverage2D nativeCoverage = geoHashGrid.toGridCoverage2D(); // reproject final GridCoverage2D transformedCoverage = (GridCoverage2D) Operations.DEFAULT.resample(nativeCoverage, argOutputEnv.getCoordinateReferenceSystem()); // upscale to approximate output resolution final GridCoverage2D scaledCoverage = GridCoverageUtil.scale(transformedCoverage, argOutputWidth*argPixelsPerCell, argOutputHeight*argPixelsPerCell); // crop (geohash 
grid envelope will always contain output bbox) final GridCoverage2D croppedCoverage = GridCoverageUtil.crop(scaledCoverage, argOutputEnv); return GridCoverageUtil.scale(croppedCoverage, argOutputWidth, argOutputHeight); } catch (Exception e) { throw new ProcessException("Error executing GeoHashGridProcess", e); } } public Query invertQuery( @DescribeParameter(name = "outputBBOX", description = "Georeferenced bounding box of the output") ReferencedEnvelope envelope, Query targetQuery, GridGeometry targetGridGeometry ) throws ProcessException { final BBOXRemovingFilterVisitor visitor = new BBOXRemovingFilterVisitor(); Filter filter = (Filter) targetQuery.getFilter().accept(visitor, null); final String geometryName = visitor.getGeometryPropertyName(); if (geometryName != null) { final BBOX bbox; try { if (envelope.getCoordinateReferenceSystem() != null) { envelope = envelope.transform(DefaultGeographicCRS.WGS84,false); } bbox = FILTER_FACTORY.bbox(geometryName, envelope.getMinX(), envelope.getMinY(), envelope.getMaxX(), envelope.getMaxY(), "EPSG:4326"); } catch (Exception e) { throw new ProcessException("Unable to create bbox filter for feature source", e); } filter = (Filter) FILTER_FACTORY.and(filter, bbox).accept(new SimplifyingFilterVisitor(), null); targetQuery.setFilter(filter); } final List properties = new ArrayList<>(); properties.add(FILTER_FACTORY.property("_aggregation")); targetQuery.setProperties(properties); return targetQuery; } } ================================================ FILE: gt-elasticsearch-process/src/main/java/mil/nga/giat/process/elasticsearch/GridCell.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/
package mil.nga.giat.process.elasticsearch;

/**
 * Immutable pairing of a geohash cell identifier with its computed raster value.
 */
class GridCell {

    private final String geohash;
    private final Number value;

    public GridCell(String geohash, Number value) {
        this.geohash = geohash;
        this.value = value;
    }

    public String getGeohash() {
        return geohash;
    }

    public Number getValue() {
        return value;
    }
}

================================================ FILE: gt-elasticsearch-process/src/main/java/mil/nga/giat/process/elasticsearch/GridCoverageUtil.java ================================================

/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */
package mil.nga.giat.process.elasticsearch;

import java.awt.image.Raster;
import java.awt.image.RenderedImage;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.coverage.processing.CoverageProcessor;
import org.geotools.coverage.processing.Operations;
import org.geotools.geometry.GeneralEnvelope;
import org.opengis.geometry.Envelope;
import org.opengis.parameter.ParameterValueGroup;

/**
 * Helpers for resizing and cropping grid coverages.
 */
class GridCoverageUtil {

    /**
     * Rescales the coverage so its raster approximates the given pixel dimensions.
     *
     * NOTE(review): xScale is derived from the target height and yScale from the
     * target width, yet they are passed as (xScale, yScale) to Operations.scale —
     * this looks like an axis swap for non-square outputs. Confirm against the
     * Operations.scale(source, xScale, yScale, xTrans, yTrans) argument order
     * before changing; behavior is preserved here as-is.
     */
    public static GridCoverage2D scale(GridCoverage2D coverage, float width, float height) {
        final RenderedImage renderedImage = coverage.getRenderedImage();
        final Raster renderedGrid = renderedImage.getData();
        float yScale = width/renderedGrid.getWidth();
        float xScale = height/renderedGrid.getHeight();
        final Operations ops = new Operations(null);
        return (GridCoverage2D) ops.scale(coverage, xScale, yScale, 0, 0);
    }

    /**
     * Crops the coverage to the given envelope using the "CoverageCrop" operation.
     */
    public static GridCoverage2D crop(GridCoverage2D coverage, Envelope envelope) {
        final CoverageProcessor processor = new CoverageProcessor();
        final ParameterValueGroup param = processor.getOperation("CoverageCrop").getParameters();
        final GeneralEnvelope crop = new GeneralEnvelope(envelope);
        param.parameter("Source").setValue( coverage );
        param.parameter("Envelope").setValue( crop );
        return (GridCoverage2D) processor.doOperation(param);
    }
}

================================================ FILE:
gt-elasticsearch-process/src/main/java/mil/nga/giat/process/elasticsearch/MetricGeoHashGrid.java ================================================

/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */
package mil.nga.giat.process.elasticsearch;

import java.util.List;
import java.util.Map;

/**
 * GeoHash grid whose cell values come from a named sub-aggregation metric
 * (e.g. avg/max) inside each bucket. Params: [metricKey, valueKey], both
 * optional. Generic type parameters restored; the extraction had stripped them.
 */
public class MetricGeoHashGrid extends GeoHashGrid {

    public final static String DEFAULT_METRIC_KEY = "metric";

    private final static int METRIC_KEY_INDEX = 0;
    private final static int VALUE_KEY_INDEX = 1;

    private String metricKey = DEFAULT_METRIC_KEY;
    private String valueKey = GeoHashGrid.VALUE_KEY;

    /**
     * @param params optional [metricKey, valueKey]; missing entries keep defaults
     */
    @Override
    public void setParams(List<String> params) {
        if (null != params) {
            if (params.size() >= 1) {
                metricKey = params.get(METRIC_KEY_INDEX);
            }
            if (params.size() >= 2) {
                valueKey = params.get(VALUE_KEY_INDEX);
            }
        }
    }

    @Override
    public Number computeCellValue(Map<String, Object> bucket) {
        return super.pluckMetricValue(bucket, metricKey, valueKey);
    }

    public String getMetricKey() {
        return metricKey;
    }

    public String getValueKey() {
        return valueKey;
    }
}

================================================ FILE: gt-elasticsearch-process/src/main/java/mil/nga/giat/process/elasticsearch/NestedAggGeoHashGrid.java ================================================

/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
*/ package mil.nga.giat.process.elasticsearch; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Logger; import org.geotools.util.logging.Logging; public class NestedAggGeoHashGrid extends GeoHashGrid { private final static Logger LOGGER = Logging.getLogger(NestedAggGeoHashGrid.class); private final static int NESTED_KEY_INDEX = 0; private final static int METRIC_KEY_INDEX = 1; private final static int VALUE_KEY_INDEX = 2; private final static int SELECTION_STRATEGY_INDEX = 3; private final static int RASTER_STRATEGY_INDEX = 4; private final static int TERMS_MAP_INDEX = 5; final static String SELECT_LARGEST = "largest"; final static String SELECT_SMALLEST = "smallest"; final static String RASTER_FROM_VALUE = "value"; final static String RASTER_FROM_KEY = "key"; final static String DEFAULT_AGG_KEY = "nested"; final static String DEFAULT_METRIC_KEY = ""; private String nestedAggKey = DEFAULT_AGG_KEY; private String metricKey = DEFAULT_METRIC_KEY; private String valueKey = GeoHashGrid.VALUE_KEY; private String selectionStrategy = SELECT_LARGEST; private String rasterStrategy = RASTER_FROM_VALUE; private Map termsMap = null; @Override public void setParams(List params) { if (null != params) { if (params.size() < 5) { LOGGER.warning("Parameters list does not contain required length; you provided " + params.size() + ", expecting: 5 or more"); throw new IllegalArgumentException(); } nestedAggKey = params.get(NESTED_KEY_INDEX); metricKey = params.get(METRIC_KEY_INDEX); valueKey = params.get(VALUE_KEY_INDEX); switch (params.get(SELECTION_STRATEGY_INDEX)) { case SELECT_SMALLEST: selectionStrategy = params.get(SELECTION_STRATEGY_INDEX); break; case SELECT_LARGEST: selectionStrategy = params.get(SELECTION_STRATEGY_INDEX); break; default: LOGGER.warning("Unexpected buckets selection strategy parameter; you provided " + params.get(SELECTION_STRATEGY_INDEX) + ", defaulting to: " + selectionStrategy); } switch 
(params.get(RASTER_STRATEGY_INDEX)) { case RASTER_FROM_VALUE: rasterStrategy = params.get(RASTER_STRATEGY_INDEX); break; case RASTER_FROM_KEY: rasterStrategy = params.get(RASTER_STRATEGY_INDEX); break; default: LOGGER.warning("Unexpected raster strategy parameter; you provided " + params.get(RASTER_STRATEGY_INDEX) + ", defaulting to: " + rasterStrategy); } if (rasterStrategy.equals(RASTER_FROM_KEY) && params.size() >= 6) { termsMap = new HashMap<>(); String[] terms = params.get(TERMS_MAP_INDEX).split(";"); for (String term : terms) { String[] keyValueSplit = term.split(":"); if (keyValueSplit.length != 2) { LOGGER.warning("Term " + term + " does not contain required format :"); throw new IllegalArgumentException(); } termsMap.put(keyValueSplit[0], new Integer(keyValueSplit[1])); } } } } @Override public Number computeCellValue(Map geogridBucket) { List> aggBuckets = super.pluckAggBuckets(geogridBucket, nestedAggKey); Number rasterValue = 0; switch (selectionStrategy) { case SELECT_SMALLEST: rasterValue = selectSmallest(aggBuckets); break; case SELECT_LARGEST: rasterValue = selectLargest(aggBuckets); break; } return rasterValue; } Number selectLargest(List> buckets) { String largestKey = pluckBucketName(buckets.get(0)); Number largestValue = super.pluckMetricValue(buckets.get(0), metricKey, valueKey); for (Map bucket : buckets) { Number value = super.pluckMetricValue(bucket, metricKey, valueKey); if (value.doubleValue() > largestValue.doubleValue()) { largestKey = super.pluckBucketName(bucket); largestValue = value; } } return bucketToRaster(largestKey, largestValue); } Number selectSmallest(List> buckets) { String smallestKey = pluckBucketName(buckets.get(0)); Number smallestValue = super.pluckMetricValue(buckets.get(0), metricKey, valueKey); for (Map bucket : buckets) { Number value = super.pluckMetricValue(bucket, metricKey, valueKey); if (value.doubleValue() < smallestValue.doubleValue()) { smallestKey = super.pluckBucketName(bucket); smallestValue = value; } } 
return bucketToRaster(smallestKey, smallestValue); } Number bucketToRaster(String key, Number value) { Number rasterValue = value; if (rasterStrategy.equals(RASTER_FROM_KEY)) { if (null != termsMap) { if (termsMap.containsKey(key)) { rasterValue = termsMap.get(key); } else { LOGGER.warning("Cannot convert key (String) to raster value, mapping does not contain key " + key + ". Add key to terms_map argument to resolve."); throw new IllegalArgumentException(); } } else { try { rasterValue = Double.valueOf(key); } catch (NumberFormatException nfe) { LOGGER.warning("Cannot convert key (String) to raster value, key, " + key + ", is not a number. Use terms_map argument to map Strings to Numbers."); throw new IllegalArgumentException(); } } } return rasterValue; } public String getNestedAggKey() { return nestedAggKey; } public String getMetricKey() { return metricKey; } public String getValueKey() { return valueKey; } public String getSelectionStrategy() { return selectionStrategy; } public String getRasterStrategy() { return rasterStrategy; } public Map getTermsMap() { return termsMap; } } ================================================ FILE: gt-elasticsearch-process/src/main/java/mil/nga/giat/process/elasticsearch/RasterScale.java ================================================ /* * This file is hereby placed into the Public Domain. This means anyone is * free to do whatever they wish with this file. 
*/
package mil.nga.giat.process.elasticsearch;

/**
 * Linearly rescales raw raster cell values into a configured output range
 * [scaleMin, scaleMax], optionally applying a log10 transform first.
 * Callers must feed every value through {@link #prepareScale(float)} to
 * accumulate the observed data range before calling {@link #scaleValue(float)}.
 */
class RasterScale {

    private static final float DEFAULT_SCALE_MIN = 0f;

    private final Float scaleMin;
    private final Float scaleMax;
    private final boolean scaleLog;

    // Observed data range, accumulated by prepareScale; stays null until the
    // first value is seen (and is only tracked when a target range is set).
    private Float dataMin;
    private Float dataMax;

    /** No scaling and no log transform: values pass straight through. */
    public RasterScale() {
        this(null, null, false);
    }

    /** Optional log10 transform only; no target range. */
    public RasterScale(boolean useLog) {
        this(null, null, useLog);
    }

    /** Target range [0, scaleMax], no log transform. */
    public RasterScale(Float scaleMax) {
        this(DEFAULT_SCALE_MIN, scaleMax, false);
    }

    /** Target range [scaleMin, scaleMax], no log transform. */
    public RasterScale(Float scaleMin, Float scaleMax) {
        this(scaleMin, scaleMax, false);
    }

    /**
     * @param scaleMin lower bound of the output range (null when unscaled)
     * @param scaleMax upper bound of the output range (null when unscaled)
     * @param scaleLog apply log10 to positive values before scaling
     * @throws IllegalArgumentException when a max is given without a min, or
     *         the target range is degenerate (min == max)
     */
    public RasterScale(Float scaleMin, Float scaleMax, boolean scaleLog) {
        this.scaleMin = scaleMin;
        this.scaleMax = scaleMax;
        this.scaleLog = scaleLog;
        if (scaleMax != null && (scaleMin == null || scaleMax.floatValue() == scaleMin)) {
            throw new IllegalArgumentException();
        }
    }

    /**
     * Maps a raw value into the target range using the data range gathered by
     * prepareScale. NOTE(review): requires prepareScale to have been called at
     * least once when a target range is set — dataMax is dereferenced here.
     */
    public float scaleValue(float value) {
        float transformed = (scaleLog && value > 0) ? (float) Math.log10(value) : value;
        if (scaleMax == null) {
            // No target range configured: pass through (after optional log).
            return transformed;
        }
        if (dataMax.floatValue() == dataMin) {
            // Degenerate data range: every value maps to the top of the range.
            return scaleMax;
        }
        return ((scaleMax - scaleMin) * (transformed - dataMin) / (dataMax - dataMin)) + scaleMin;
    }

    /** Folds one raw value into the observed data min/max (post-log when enabled). */
    public void prepareScale(float value) {
        float transformed = (scaleLog && value > 0) ? (float) Math.log10(value) : value;
        if (scaleMax == null) {
            return; // scaling disabled: nothing to track
        }
        if (dataMin == null) {
            // First observation initializes both bounds.
            dataMin = transformed;
            dataMax = transformed;
        } else {
            if (transformed < dataMin) {
                dataMin = transformed;
            }
            if (transformed > dataMax) {
                dataMax = transformed;
            }
        }
    }

    /** True when a target output range was configured. */
    public boolean isScaleSet() {
        return scaleMax != null;
    }

    public Float getScaleMin() {
        return scaleMin;
    }

    public Float getScaleMax() {
        return scaleMax;
    }
}
================================================ FILE: gt-elasticsearch-process/src/main/resources/META-INF/services/org.geotools.process.ProcessFactory ================================================
org.geotools.process.vector.VectorProcessFactory
================================================ FILE: gt-elasticsearch-process/src/main/resources/META-INF/services/org.geotools.process.vector.VectorProcess
================================================
mil.nga.giat.process.elasticsearch.GeoHashGridProcess
================================================ FILE: gt-elasticsearch-process/src/test/java/mil/nga/giat/process/elasticsearch/GeoHashGridProcessTest.java ================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */
package mil.nga.giat.process.elasticsearch;

import static org.junit.Assert.*;
import java.awt.geom.Point2D;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.data.Query;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.CRS;
import org.geotools.referencing.crs.DefaultGeographicCRS;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.jts.geom.Envelope;
import org.opengis.filter.Filter;
import org.opengis.filter.FilterFactory;
import org.opengis.referencing.crs.CRSAuthorityFactory;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.davidmoten.geo.GeoHash;
import com.github.davidmoten.geo.LatLong;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

/**
 * Tests GeoHashGridProcess end-to-end: renders three precision-1 geohash
 * buckets (SW corner, center, NE corner, doc_count 10 each) into a coverage
 * and checks cell values at interior and edge sample points, plus the
 * invertQuery bbox-rewriting behavior.
 */
public class GeoHashGridProcessTest {

    private SimpleFeatureCollection features;
    // Sample-point offset (degrees) used to probe just inside/outside cell boundaries.
    private double fineDelta;
    private GeoHashGridProcess process;
    private FilterFactory ff;

    @Before
    public void setup() throws JsonProcessingException {
        ObjectMapper mapper = new ObjectMapper();
        features = TestUtil.createAggregationFeatures(ImmutableList.of(
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(-89.9,-179.9),1),"doc_count",10))),
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(0.1,0.1),1),"doc_count",10))),
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(89.9,179.9),1),"doc_count",10)))
                ));
        fineDelta = 0.45;
        ff = CommonFactoryFinder.getFilterFactory(null);
        process = new GeoHashGridProcess();
    }

    @Test
    public void testBasic() {
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,DefaultGeographicCRS.WGS84);
        int width = 8;
        int height = 4;
        int pixelsPerCell = 1;
        String strategy = "Basic";
        Float scaleMin = 0f;
        GridCoverage2D coverage = process.execute(features, pixelsPerCell, strategy, null, null, scaleMin, null, false, envelope, width, height, null);
        checkInternal(coverage, fineDelta);
        checkEdge(coverage, envelope, fineDelta);
    }

    // NOTE(review): identical to testBasic apart from width/height; the scale max
    // argument is null, so no value scaling is actually exercised despite the name.
    @Test
    public void testScaled() {
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,DefaultGeographicCRS.WGS84);
        int width = 16;
        int height = 8;
        int pixelsPerCell = 1;
        String strategy = "Basic";
        Float scaleMin = 0f;
        GridCoverage2D coverage = process.execute(features, pixelsPerCell, strategy, null, null, scaleMin, null, false, envelope, width, height, null);
        checkInternal(coverage, fineDelta);
        checkEdge(coverage, envelope, fineDelta);
    }

    // Envelope smaller than the full world: exercises cropping at sub-cell resolution.
    @Test
    public void testSubCellCrop() {
        ReferencedEnvelope envelope = new ReferencedEnvelope(-168.75,168.75,-78.75,78.75,DefaultGeographicCRS.WGS84);
        int width = 16;
        int height = 8;
        int pixelsPerCell = 1;
        String strategy = "Basic";
        Float scaleMin = 0f;
        GridCoverage2D coverage = process.execute(features, pixelsPerCell, strategy, null, null, scaleMin, null, false, envelope, width, height, null);
        checkInternal(coverage, fineDelta);
        checkEdge(coverage, envelope, fineDelta);
    }

    // High output resolution with a non-aligned envelope; only interior points checked.
    @Test
    public void testSubCellCropWithSheer() {
        ReferencedEnvelope envelope = new ReferencedEnvelope(-168.75,168.75,-78.75,78.75,DefaultGeographicCRS.WGS84);
        int width = 900;
        int height = 600;
        int pixelsPerCell = 1;
        String strategy = "Basic";
        Float scaleMin = 0f;
        GridCoverage2D coverage = process.execute(features, pixelsPerCell, strategy, null, null, scaleMin, null, false, envelope, width, height, null);
        checkInternal(coverage, fineDelta);
    }

    // invertQuery should replace the query's bbox with the rendering envelope.
    @Test
    public void testInvertQuery() {
        Filter filter = ff.bbox("geom", 0, 0, 0, 0, "EPSG:4326");
        ReferencedEnvelope env = new ReferencedEnvelope(0,1,2,3,DefaultGeographicCRS.WGS84);
        Query query = new Query();
        query.setFilter(filter);
        Query queryOut = process.invertQuery(env, query, null);
        assertEquals(ff.bbox("geom", 0, 2, 1, 3, "EPSG:4326"), queryOut.getFilter());
    }

    @Test
    public void testInvertQueryAcrossDateline() {
        Filter filter = ff.bbox("geom", 0, 0, 0, 0, "EPSG:4326");
        ReferencedEnvelope env = new ReferencedEnvelope(-179,179,2,3,DefaultGeographicCRS.WGS84);
        Query query = new Query();
        query.setFilter(filter);
        Query queryOut = process.invertQuery(env, query, null);
        assertEquals(ff.bbox("geom", -179, 2, 179, 3, "EPSG:4326"), queryOut.getFilter());
    }

    // EPSG:4326 fetched via authority factory uses north-east axis order; the
    // resulting bbox must still come out in lon/lat order.
    @Test
    public void testInvertQueryNorthEastAxisOrder() throws Exception {
        Filter filter = ff.bbox("geom", 0, 0, 0, 0, "EPSG:4326");
        CRSAuthorityFactory factory = CRS.getAuthorityFactory(false);
        CoordinateReferenceSystem crs = factory.createCoordinateReferenceSystem("EPSG:4326");
        ReferencedEnvelope env = new ReferencedEnvelope(2,3,0,1,crs);
        Query query = new Query();
        query.setFilter(filter);
        Query queryOut = process.invertQuery(env, query, null);
        assertEquals(ff.bbox("geom", 0, 2, 1, 3, "EPSG:4326"), queryOut.getFilter());
    }

    // Non-bbox filter elements must be preserved unchanged around the rewritten bbox.
    @Test
    public void testInvertQueryWithOtherFilterElement() {
        Filter filter = ff.and(ff.equals(ff.property("key"), ff.literal("value")), ff.bbox("geom", 0, 0, 0, 0, "EPSG:4326"));
        ReferencedEnvelope env = new ReferencedEnvelope(0,1,2,3,DefaultGeographicCRS.WGS84);
        Query query = new Query();
        query.setFilter(filter);
        Query queryOut = process.invertQuery(env, query, null);
        assertEquals(ff.and(ff.equals(ff.property("key"), ff.literal("value")), ff.bbox("geom", 0, 2, 1, 3, "EPSG:4326")), queryOut.getFilter());
    }

    // Probes just inside/outside each populated cell: 10 inside, 0 outside.
    private void checkInternal(GridCoverage2D coverage, double delta) {
        assertEquals(10, coverage.evaluate(new Point2D.Double(-135-delta, -45-delta), new float[1])[0],1e-10);
        assertEquals(0, coverage.evaluate(new Point2D.Double(-135+delta, -45+delta), new float[1])[0],1e-10);
        assertEquals(0, coverage.evaluate(new Point2D.Double(-delta, -delta), new float[1])[0],1e-10);
        assertEquals(10, coverage.evaluate(new Point2D.Double(delta, delta), new float[1])[0],1e-10);
        assertEquals(10, coverage.evaluate(new Point2D.Double(45-delta, 45-delta), new float[1])[0],1e-10);
        assertEquals(0, coverage.evaluate(new Point2D.Double(45+delta, 45+delta), new float[1])[0],1e-10);
        assertEquals(10, coverage.evaluate(new Point2D.Double(135+delta, 45+delta), new float[1])[0],1e-10);
        assertEquals(0, coverage.evaluate(new Point2D.Double(135-delta, 45-delta), new float[1])[0],1e-10);
    }

    // The SW and NE corner cells are populated; probe just inside the envelope corners.
    private void checkEdge(GridCoverage2D coverage, Envelope env, double delta) {
        assertEquals(10, coverage.evaluate(new Point2D.Double(env.getMinX()+delta, env.getMinY()+delta), new float[1])[0],1e-10);
        assertEquals(10, coverage.evaluate(new Point2D.Double(env.getMaxX()-delta, env.getMaxY()-delta), new float[1])[0],1e-10);
    }
}
================================================ FILE: gt-elasticsearch-process/src/test/java/mil/nga/giat/process/elasticsearch/GeoHashGridTest.java ================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */
package mil.nga.giat.process.elasticsearch;

import static org.junit.Assert.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.stream.IntStream;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.CRS;
import org.geotools.referencing.crs.DefaultGeographicCRS;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.jts.geom.Envelope;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.davidmoten.geo.GeoHash;
import com.github.davidmoten.geo.LatLong;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

/**
 * Tests GeoHashGrid (via BasicGeoHashGrid): grid dimensions/envelope derived
 * from the requested bounds, cell placement from geohash keys, scaling,
 * empty-cell values, and the pluck* bucket helpers.
 * NOTE(review): generic type parameters (e.g. on Map fields) appear stripped
 * by extraction in this view; "initalize" is the project's (misspelled) API name.
 */
public class GeoHashGridTest {

    private static final String BUCKET_NAME = "bucket1";
    private static final int DOC_COUNT = 11;
    private static final int VALUE = 25;
    private static final String METRIC_KEY = "metric_key";
    private static final String VALUE_KEY = "value_key";
    private static final String AGG_KEY = "nested_agg";
    private static final int[] AGG_RESULTS = {1, 2, 3, 4, 5};
    // Canned buckets for the pluck* helper tests.
    private static final Map SIMPLE_BUCKET = TestUtil.createDocCountBucket(BUCKET_NAME, DOC_COUNT);
    private static final Map METRIC_BUCKET = TestUtil.createMetricBucket(DOC_COUNT, METRIC_KEY, VALUE_KEY, VALUE);
    private static final Map AGG_BUCKET = TestUtil.createAggBucket(AGG_KEY, AGG_RESULTS);

    private SimpleFeatureCollection features;
    private GeoHashGrid geohashGrid;
    private ObjectMapper mapper;

    @Before
    public void setup() {
        this.geohashGrid = new BasicGeoHashGrid();
        this.mapper = new ObjectMapper();
    }

    // Envelope extending below -180 longitude: grid spans the 540-degree range.
    @Test
    public void testGeoHashGridLowLonRange() throws Exception {
        features = TestUtil.createAggregationFeatures(ImmutableList.of(
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(-89.9,-179.9),1),"doc_count",10)))
                ));
        ReferencedEnvelope envelope = new ReferencedEnvelope(-360,180,-90,90,DefaultGeographicCRS.WGS84);
        geohashGrid.initalize(envelope, features);
        assertEquals(GeoHash.widthDegrees(1), geohashGrid.getCellWidth(), 1e-10);
        assertEquals(GeoHash.heightDegrees(1), geohashGrid.getCellHeight(), 1e-10);
        assertEquals(new Envelope(-360+GeoHash.widthDegrees(1)/2.,180-GeoHash.widthDegrees(1)/2.,-90+GeoHash.heightDegrees(1)/2.,90-GeoHash.heightDegrees(1)/2.), geohashGrid.getEnvelope());
        int ny = (int) Math.round(180/geohashGrid.getCellHeight());
        int nx = (int) Math.round(540/GeoHash.widthDegrees(1));
        assertEquals(ny, geohashGrid.getGrid().length);
        assertEquals(nx, geohashGrid.getGrid()[0].length);
    }

    // Envelope entirely above +180 longitude (wrapped world copy).
    @Test
    public void testGeoHashGridHighLonRange() throws Exception {
        features = TestUtil.createAggregationFeatures(ImmutableList.of(
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(-89.9,-179.9),1),"doc_count",10)))
                ));
        ReferencedEnvelope envelope = new ReferencedEnvelope(360,540,-90,90,DefaultGeographicCRS.WGS84);
        geohashGrid.initalize(envelope, features);
        assertEquals(GeoHash.widthDegrees(1), geohashGrid.getCellWidth(), 1e-10);
        assertEquals(GeoHash.heightDegrees(1), geohashGrid.getCellHeight(), 1e-10);
        assertEquals(new Envelope(360+GeoHash.widthDegrees(1)/2.,540-GeoHash.widthDegrees(1)/2.,-90+GeoHash.heightDegrees(1)/2.,90-GeoHash.heightDegrees(1)/2.), geohashGrid.getEnvelope());
        int ny = (int) Math.round(180/geohashGrid.getCellHeight());
        int nx = (int) Math.round(180/GeoHash.widthDegrees(1));
        assertEquals(ny, geohashGrid.getGrid().length);
        assertEquals(nx, geohashGrid.getGrid()[0].length);
    }

    // Whole-world envelope: single SW bucket should land in the bottom-left cell.
    @Test
    public void testGeoHashGrid() throws Exception {
        features = TestUtil.createAggregationFeatures(ImmutableList.of(
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(-89.9,-179.9),1),"doc_count",10)))
                ));
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,DefaultGeographicCRS.WGS84);
        geohashGrid.initalize(envelope, features);
        assertEquals(GeoHash.widthDegrees(1), geohashGrid.getCellWidth(), 1e-10);
        assertEquals(GeoHash.heightDegrees(1), geohashGrid.getCellHeight(), 1e-10);
        assertEquals(new Envelope(-180+GeoHash.widthDegrees(1)/2.,180-GeoHash.widthDegrees(1)/2.,-90+GeoHash.heightDegrees(1)/2.,90-GeoHash.heightDegrees(1)/2.), geohashGrid.getEnvelope());
        int ny = (int) Math.round(180/geohashGrid.getCellHeight());
        int nx = (int) Math.round(360/GeoHash.widthDegrees(1));
        float[][] expected = new float[ny][nx];
        expected[ny-1][0] = 10; // row ny-1 = southernmost row; column 0 = westernmost
        assertEquals(ny, geohashGrid.getGrid().length);
        assertEquals(nx, geohashGrid.getGrid()[0].length);
        IntStream.range(0, ny).forEach(i-> assertArrayEquals(expected[i], geohashGrid.getGrid()[i], 0.0f));
    }

    // Doc counts 20 and 30 scaled into [5, 10] → min bucket becomes 5, max becomes 10.
    @Test
    public void testGeoHashGrid_scaled() throws Exception {
        features = TestUtil.createAggregationFeatures(ImmutableList.of(
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(-89.9,-179.9),1),"doc_count",20))),
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(89.9,179.9),1),"doc_count",30)))
                ));
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,DefaultGeographicCRS.WGS84);
        geohashGrid.setScale(new RasterScale(5f, 10f));
        geohashGrid.initalize(envelope, features);
        assertEquals(GeoHash.widthDegrees(1), geohashGrid.getCellWidth(), 1e-10);
        assertEquals(GeoHash.heightDegrees(1), geohashGrid.getCellHeight(), 1e-10);
        assertEquals(new Envelope(-180+GeoHash.widthDegrees(1)/2.,180-GeoHash.widthDegrees(1)/2.,-90+GeoHash.heightDegrees(1)/2.,90-GeoHash.heightDegrees(1)/2.), geohashGrid.getEnvelope());
        int ny = (int) Math.round(180/geohashGrid.getCellHeight());
        int nx = (int) Math.round(360/GeoHash.widthDegrees(1));
        assertEquals(ny, geohashGrid.getGrid().length);
        assertEquals(nx, geohashGrid.getGrid()[0].length);
        float[][] expected = new float[ny][nx];
        expected[0][7] = 10;
        expected[ny-1][0] = 5;
        IntStream.range(0, ny).forEach(i-> assertArrayEquals(geohashGrid.getGrid()[i], expected[i], 0.0f));
    }

    // Web-Mercator envelope must be reprojected back to a WGS84 bounding box.
    @Test
    public void testGeoHashGridWithProjectedEnvelope() throws Exception {
        features = TestUtil.createAggregationFeatures(ImmutableList.of(
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(-89.9,-179.9),1),"doc_count",10)))
                ));
        ReferencedEnvelope envelope = new ReferencedEnvelope(-19926188.85,19926188.85,-30240971.96,30240971.96, CRS.decode("EPSG:3857"));
        geohashGrid.initalize(envelope, features);
        assertEquals(new ReferencedEnvelope(-180,180,-90,90,DefaultGeographicCRS.WGS84), geohashGrid.getBoundingBox());
    }

    @Test
    public void testGeoHashGridWithNoFeatures() throws Exception {
        features = new DefaultFeatureCollection();
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,CRS.decode("EPSG:4326"));
        geohashGrid.initalize(envelope, features);
        IntStream.range(0, geohashGrid.getGrid().length).forEach(i-> assertArrayEquals(new float[geohashGrid.getGrid()[i].length], geohashGrid.getGrid()[i], 0.0f));
    }

    // A configured empty-cell value fills every cell when no buckets exist.
    @Test
    public void testGeoHashGrid_emptyCellValue() throws Exception {
        float emptyCellValue = -1.0f;
        features = new DefaultFeatureCollection();
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,CRS.decode("EPSG:4326"));
        geohashGrid.setEmptyCellValue(emptyCellValue);
        geohashGrid.initalize(envelope, features);
        IntStream.range(0, geohashGrid.getGrid().length).forEach(row-> IntStream.range(0, geohashGrid.getGrid()[row].length).forEach(column-> assertEquals(emptyCellValue, geohashGrid.getGrid()[row][column], 0.0)));
    }

    // Null empty-cell value falls back to zero-filled cells.
    @Test
    public void testGeoHashGrid_nullEmptyCellValue() throws Exception {
        features = new DefaultFeatureCollection();
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,CRS.decode("EPSG:4326"));
        geohashGrid.setEmptyCellValue(null);
        geohashGrid.initalize(envelope, features);
        IntStream.range(0, geohashGrid.getGrid().length).forEach(row-> IntStream.range(0, geohashGrid.getGrid()[row].length).forEach(column-> assertEquals(0.0, geohashGrid.getGrid()[row][column], 0.0)));
    }

    // Features without an "_aggregation" attribute are ignored.
    @Test
    public void testGeoHashGridWithNoAggregations() throws Exception {
        features = TestUtil.createAggregationFeatures(ImmutableList.of(
                ImmutableMap.of("aString", UUID.randomUUID().toString())
                ));
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,CRS.decode("EPSG:4326"));
        geohashGrid.initalize(envelope, features);
        IntStream.range(0, geohashGrid.getGrid().length).forEach(i-> assertArrayEquals(new float[geohashGrid.getGrid()[i].length], geohashGrid.getGrid()[i], 0.0f));
    }

    // A bucket missing doc_count is a hard error.
    @Test(expected=IllegalArgumentException.class)
    public void testGeoHashGridWithNoDocCount() throws Exception {
        features = TestUtil.createAggregationFeatures(ImmutableList.of(
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key",GeoHash.encodeHash(new LatLong(-89.9,-179.9),1))))
                ));
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,CRS.decode("EPSG:4326"));
        geohashGrid.initalize(envelope, features);
        IntStream.range(0, geohashGrid.getGrid().length).forEach(i-> assertArrayEquals(new float[geohashGrid.getGrid()[i].length], geohashGrid.getGrid()[i], 0.0f));
    }

    // An unparseable geohash key is skipped silently (grid stays empty).
    @Test
    public void testGeoHashGridWithInvalidGeohash() throws Exception {
        features = TestUtil.createAggregationFeatures(ImmutableList.of(
                ImmutableMap.of("_aggregation", mapper.writeValueAsBytes(ImmutableMap.of("key","invalid","doc_count",10)))
                ));
        ReferencedEnvelope envelope = new ReferencedEnvelope(-180,180,-90,90,CRS.decode("EPSG:4326"));
        geohashGrid.initalize(envelope, features);
        IntStream.range(0, geohashGrid.getGrid().length).forEach(i-> assertArrayEquals(new float[geohashGrid.getGrid()[i].length], geohashGrid.getGrid()[i], 0.0f));
    }

    @Test
    public void testPluckBucketName() {
        String plucked = this.geohashGrid.pluckBucketName(SIMPLE_BUCKET);
        assertEquals(BUCKET_NAME, plucked);
    }

    // Non-string bucket keys are stringified.
    @Test
    public void testPluckBucketName_doubleKey() {
        Map bucket = new HashMap<>();
        bucket.put(GeoHashGrid.BUCKET_NAME_KEY, 2.0);
        bucket.put("doc_count", DOC_COUNT);
        String plucked = this.geohashGrid.pluckBucketName(bucket);
        assertEquals("2.0", plucked);
    }

    @Test(expected=IllegalArgumentException.class)
    public void testPluckBucketName_invalidKey() {
        Map bucket = new HashMap<>();
        bucket.put("invalid", "invalid");
        this.geohashGrid.pluckBucketName(bucket);
    }

    @Test
    public void testPluckDocCount() {
        Number plucked = this.geohashGrid.pluckDocCount(SIMPLE_BUCKET);
        assertEquals(DOC_COUNT, plucked);
    }

    @Test
    public void testPluckMetricValue() {
        Number plucked = this.geohashGrid.pluckMetricValue(METRIC_BUCKET, METRIC_KEY, VALUE_KEY);
        assertEquals(VALUE, plucked);
    }

    // Null or empty metric key falls back to the bucket's doc_count.
    @Test
    public void testPluckMetricValue_docCount() {
        Number plucked = this.geohashGrid.pluckMetricValue(METRIC_BUCKET, null, null);
        assertEquals(DOC_COUNT, plucked);
        plucked = this.geohashGrid.pluckMetricValue(METRIC_BUCKET, "", null);
        assertEquals(DOC_COUNT, plucked);
    }

    @Test(expected=IllegalArgumentException.class)
    public void testPluckMetricValue_canNotFindMetricKey() {
        this.geohashGrid.pluckMetricValue(METRIC_BUCKET, "noGonnaFindMe", VALUE_KEY);
    }

    @Test(expected=IllegalArgumentException.class)
    public void testPluckMetricValue_canNotFindValueKey() {
        this.geohashGrid.pluckMetricValue(METRIC_BUCKET, METRIC_KEY, "noGonnaFindMe");
    }

    @Test
    public void testPluckAggBuckets() {
        List> buckets = this.geohashGrid.pluckAggBuckets(AGG_BUCKET, AGG_KEY);
        assertEquals(AGG_RESULTS.length, buckets.size());
    }

    @Test(expected=IllegalArgumentException.class)
    public void testPluckAggBuckets_canNotFindAggKey() {
        this.geohashGrid.pluckAggBuckets(AGG_BUCKET, "noGonnaFindMe");
    }
}
================================================ FILE: gt-elasticsearch-process/src/test/java/mil/nga/giat/process/elasticsearch/GridCoverageUtilTest.java ================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */
package mil.nga.giat.process.elasticsearch;

import java.awt.geom.Point2D;
import java.awt.image.RenderedImage;
import java.util.stream.IntStream;
import org.geotools.coverage.CoverageFactoryFinder;
import org.geotools.coverage.grid.GridCoordinates2D;
import org.geotools.coverage.grid.GridCoverage2D;
import org.geotools.coverage.grid.GridCoverageFactory;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.util.factory.GeoTools;
import org.junit.Test;
import org.opengis.geometry.MismatchedDimensionException;
import static org.junit.Assert.*;

/**
 * Tests GridCoverageUtil.scale (nearest-neighbour resampling to a target
 * width/height) and GridCoverageUtil.crop (envelope-based sub-setting).
 */
public class GridCoverageUtilTest {

    // 2x2 → 4x4: each source cell becomes a 2x2 block of identical values.
    @Test
    public void testExactUpScale() {
        float[][] grid = new float[][] {{1,2},{3,4}};
        final GridCoverageFactory coverageFactory = CoverageFactoryFinder.getGridCoverageFactory(GeoTools.getDefaultHints());
        final GridCoverage2D coverage = coverageFactory.create("geohashGridAgg", grid, new ReferencedEnvelope(0,1,0,1,null));
        GridCoverage2D scaled = GridCoverageUtil.scale(coverage, 4, 4);
        final RenderedImage renderedImage = scaled.getRenderedImage();
        assertEquals(4, renderedImage.getWidth());
        assertEquals(4, renderedImage.getHeight());
        float[][] expected = new float[][] {{1,1,2,2},{1,1,2,2},{3,3,4,4},{3,3,4,4}};
        IntStream.range(0,4).forEach(i->IntStream.range(0, 4).forEach(j -> {
            float actual = scaled.evaluate(new GridCoordinates2D(j,i), new float[1])[0];
            assertEquals(expected[i][j], actual, 1e-10);
        }));
    }

    // 4x4 → 2x2: inverse of the up-scale case.
    @Test
    public void testExactDownScale() {
        float[][] grid = new float[][] {{1,1,2,2},{1,1,2,2},{3,3,4,4},{3,3,4,4}};
        final GridCoverageFactory coverageFactory = CoverageFactoryFinder.getGridCoverageFactory(GeoTools.getDefaultHints());
        final GridCoverage2D coverage = coverageFactory.create("geohashGridAgg", grid, new ReferencedEnvelope(0,1,0,1,null));
        GridCoverage2D scaled = GridCoverageUtil.scale(coverage, 2,2);
        final RenderedImage renderedImage = scaled.getRenderedImage();
        assertEquals(2, renderedImage.getWidth());
        assertEquals(2, renderedImage.getHeight());
        float[][] expected = new float[][] {{1,2},{3,4}};
        IntStream.range(0,2).forEach(i->IntStream.range(0, 2).forEach(j -> {
            float actual = scaled.evaluate(new GridCoordinates2D(j,i), new float[1])[0];
            assertEquals(expected[i][j], actual, 1e-10);
        }));
    }

    // Target not a multiple of the source: only output dimensions are asserted.
    // NOTE(review): scale(coverage, 3, 7) yields width 7 / height 3 — the first
    // argument after the coverage is apparently height; confirm against GridCoverageUtil.
    @Test
    public void testInexactScale() {
        float[][] grid = new float[][] {{1,2},{3,4}};
        final GridCoverageFactory coverageFactory = CoverageFactoryFinder.getGridCoverageFactory(GeoTools.getDefaultHints());
        final GridCoverage2D coverage = coverageFactory.create("geohashGridAgg", grid, new ReferencedEnvelope(0,1,0,1,null));
        GridCoverage2D scaled = GridCoverageUtil.scale(coverage, 3, 7);
        final RenderedImage renderedImage = scaled.getRenderedImage();
        assertEquals(7, renderedImage.getWidth());
        assertEquals(3, renderedImage.getHeight());
    }

    // Near-unity scaling of a large grid (1500 → 1501/1499).
    @Test
    public void testSmallScale() {
        float[][] grid = new float[1500][1500];
        final GridCoverageFactory coverageFactory = CoverageFactoryFinder.getGridCoverageFactory(GeoTools.getDefaultHints());
        final GridCoverage2D coverage = coverageFactory.create("geohashGridAgg", grid, new ReferencedEnvelope(0,1,0,1,null));
        GridCoverage2D scaled = GridCoverageUtil.scale(coverage, 1501, 1499);
        final RenderedImage renderedImage = scaled.getRenderedImage();
        assertEquals(1499, renderedImage.getWidth());
        assertEquals(1501, renderedImage.getHeight());
    }

    // Extreme magnification of a tiny grid (2 → ~1500).
    @Test
    public void testLargeScale() {
        float[][] grid = new float[2][2];
        final GridCoverageFactory coverageFactory = CoverageFactoryFinder.getGridCoverageFactory(GeoTools.getDefaultHints());
        final GridCoverage2D coverage = coverageFactory.create("geohashGridAgg", grid, new ReferencedEnvelope(0,1,0,1,null));
        GridCoverage2D scaled = GridCoverageUtil.scale(coverage, 1501, 1499);
        final RenderedImage renderedImage = scaled.getRenderedImage();
        assertEquals(1499, renderedImage.getWidth());
        assertEquals(1501, renderedImage.getHeight());
    }

    // Cropping a 2x2 world to its NE quadrant leaves the single cell with value 4.
    @Test
    public void testCrop() throws MismatchedDimensionException {
        float[][] grid = new float[][] {{3,4},{1,2}};
        final GridCoverageFactory coverageFactory = CoverageFactoryFinder.getGridCoverageFactory(GeoTools.getDefaultHints());
        final GridCoverage2D coverage = coverageFactory.create("geohashGridAgg", grid, new ReferencedEnvelope(0,20,0,20,null));
        final ReferencedEnvelope envelope = new ReferencedEnvelope(10,20,10,20,null);
        final GridCoverage2D croppedCoverage = GridCoverageUtil.crop(coverage, envelope);
        final RenderedImage renderedImage = croppedCoverage.getRenderedImage();
        assertEquals(1, renderedImage.getWidth());
        assertEquals(1, renderedImage.getHeight());
        assertEquals(4, croppedCoverage.evaluate(new Point2D.Double(15,15), new float[1])[0], 1e-10);
    }
}
================================================ FILE: gt-elasticsearch-process/src/test/java/mil/nga/giat/process/elasticsearch/MetricGeoHashGridTest.java ================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */
package mil.nga.giat.process.elasticsearch;

import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests MetricGeoHashGrid parameter parsing (metric key, value key, defaults)
 * and cell-value extraction from a metric bucket.
 */
public class MetricGeoHashGridTest {

    private MetricGeoHashGrid geohashGrid;

    @Before
    public void setup() {
        this.geohashGrid = new MetricGeoHashGrid();
    }

    // Null params → built-in defaults.
    @Test
    public void testSetParams_defaults() {
        geohashGrid.setParams(null);
        assertEquals(MetricGeoHashGrid.DEFAULT_METRIC_KEY, geohashGrid.getMetricKey());
        assertEquals(GeoHashGrid.VALUE_KEY, geohashGrid.getValueKey());
    }

    // Both metric and value keys supplied positionally.
    @Test
    public void testSetParams() {
        String metricKey = "mymetric";
        String valueKey = "myvalue";
        List params = new ArrayList<>();
        params.add(metricKey);
        params.add(valueKey);
        geohashGrid.setParams(params);
        assertEquals(metricKey, geohashGrid.getMetricKey());
        assertEquals(valueKey, geohashGrid.getValueKey());
    }

    // Only the metric key supplied; value key stays at the default.
    @Test
    public void testSetParams_justMetric() {
        String metricKey = "mymetric";
        List params = new ArrayList<>();
        params.add(metricKey);
        geohashGrid.setParams(params);
        assertEquals(metricKey, geohashGrid.getMetricKey());
        assertEquals(GeoHashGrid.VALUE_KEY, geohashGrid.getValueKey());
    }

    @Test
    public void testComputeCellValue() {
        int value = 5;
        Map metricBucket = TestUtil.createMetricBucket(1, MetricGeoHashGrid.DEFAULT_METRIC_KEY, GeoHashGrid.VALUE_KEY, value);
        Number rasterValue = geohashGrid.computeCellValue(metricBucket);
        assertEquals(value, rasterValue);
    }
}
================================================ FILE: gt-elasticsearch-process/src/test/java/mil/nga/giat/process/elasticsearch/NestedAggGeoHashGridTest.java ================================================
/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */
package mil.nga.giat.process.elasticsearch;

import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests NestedAggGeoHashGrid: positional parameter parsing (agg key, metric
 * key, value key, selection strategy, raster strategy, terms map), the
 * smallest/largest selection strategies, and key→raster-value conversion.
 */
public class NestedAggGeoHashGridTest {

    private static final int[] AGG_RESULTS = {1, 2, 3, 4, 5};

    private NestedAggGeoHashGrid geohashGrid;

    @Before
    public void setup() {
        this.geohashGrid = new NestedAggGeoHashGrid();
    }

    // Null params → all built-in defaults and no terms map.
    @Test
    public void testSetParams_defaults() {
        geohashGrid.setParams(null);
        assertEquals(NestedAggGeoHashGrid.DEFAULT_AGG_KEY, geohashGrid.getNestedAggKey());
        assertEquals(NestedAggGeoHashGrid.DEFAULT_METRIC_KEY, geohashGrid.getMetricKey());
        assertEquals(GeoHashGrid.VALUE_KEY, geohashGrid.getValueKey());
        assertEquals(NestedAggGeoHashGrid.SELECT_LARGEST, geohashGrid.getSelectionStrategy());
        assertEquals(NestedAggGeoHashGrid.RASTER_FROM_VALUE, geohashGrid.getRasterStrategy());
        assertNull(geohashGrid.getTermsMap());
    }

    // Full six-parameter form including a "k1:1;k2:2" terms map.
    @Test
    public void testSetParams() {
        String aggKey = "myagg";
        String metricKey = "mymetric";
        String valueKey = "myvalue";
        List params = new ArrayList<>();
        params.add(aggKey);
        params.add(metricKey);
        params.add(valueKey);
        params.add(NestedAggGeoHashGrid.SELECT_SMALLEST);
        params.add(NestedAggGeoHashGrid.RASTER_FROM_KEY);
        params.add("key1:1;key2:2");
        geohashGrid.setParams(params);
        assertEquals(aggKey, geohashGrid.getNestedAggKey());
        assertEquals(metricKey, geohashGrid.getMetricKey());
        assertEquals(valueKey, geohashGrid.getValueKey());
        assertEquals(NestedAggGeoHashGrid.SELECT_SMALLEST, geohashGrid.getSelectionStrategy());
        assertEquals(NestedAggGeoHashGrid.RASTER_FROM_KEY, geohashGrid.getRasterStrategy());
        Map termsMap = geohashGrid.getTermsMap();
        assertEquals(2, termsMap.size());
        // NOTE(review): new Integer(...) is deprecated; Integer.valueOf would be preferred.
        assertEquals(new Integer(1), termsMap.get("key1"));
        assertEquals(new Integer(2), termsMap.get("key2"));
    }

    // Unknown strategy tokens fall back to the defaults with a warning.
    @Test
    public void testSetParams_ignoreInvalidParams() {
        String aggKey = "myagg";
        String metricKey = "mymetric";
        String valueKey = "myvalue";
        List params = new ArrayList<>();
        params.add(aggKey);
        params.add(metricKey);
        params.add(valueKey);
        params.add("invalid token");
        params.add("invalid token");
        geohashGrid.setParams(params);
        assertEquals(aggKey, geohashGrid.getNestedAggKey());
        assertEquals(metricKey, geohashGrid.getMetricKey());
        assertEquals(valueKey, geohashGrid.getValueKey());
        assertEquals(NestedAggGeoHashGrid.SELECT_LARGEST, geohashGrid.getSelectionStrategy());
        assertEquals(NestedAggGeoHashGrid.RASTER_FROM_VALUE, geohashGrid.getRasterStrategy());
        assertNull(geohashGrid.getTermsMap());
    }

    @Test(expected=IllegalArgumentException.class)
    public void testSetParams_notEnoughParameters() {
        geohashGrid.setParams(new ArrayList<>());
    }

    // Default strategy is SELECT_LARGEST, so the max of {1..5} is expected.
    @Test
    public void testComputeCellValue() {
        Number rasterValue = geohashGrid.computeCellValue(TestUtil.createAggBucket(NestedAggGeoHashGrid.DEFAULT_AGG_KEY, AGG_RESULTS));
        assertEquals(5, rasterValue);
    }

    @Test
    public void testSelectLargest() {
        Number rasterValue = geohashGrid.selectLargest(TestUtil.createBuckets(AGG_RESULTS));
        assertEquals(5, rasterValue);
    }

    @Test
    public void testSelectSmallest() {
        Number rasterValue = geohashGrid.selectSmallest(TestUtil.createBuckets(AGG_RESULTS));
        assertEquals(1, rasterValue);
    }

    // RASTER_FROM_VALUE (default): the metric value passes through untouched.
    @Test
    public void testBucketToRaster_rasterFromValue() {
        Number bucketValue = 5.0;
        Number rasterValue = geohashGrid.bucketToRaster("bucket_key", bucketValue);
        assertEquals(bucketValue, rasterValue);
    }

    // RASTER_FROM_KEY without a terms map: the key string is parsed as a double.
    @Test
    public void testBucketToRaster_rasterFromNumericKey() {
        List params = new ArrayList<>();
        params.add("aggKey");
        params.add("metricKey");
        params.add("valueKey");
        params.add(NestedAggGeoHashGrid.SELECT_SMALLEST);
        params.add(NestedAggGeoHashGrid.RASTER_FROM_KEY);
        geohashGrid.setParams(params);
        Number rasterValue = geohashGrid.bucketToRaster("1.0", 5.0);
        assertEquals(1.0, rasterValue);
    }

    // Non-numeric key with no terms map is a hard error.
    @Test(expected=IllegalArgumentException.class)
    public void testBucketToRaster_rasterFromNumericKey_keyIsString() {
        List params = new ArrayList<>();
        params.add("aggKey");
        params.add("metricKey");
        params.add("valueKey");
        params.add(NestedAggGeoHashGrid.SELECT_SMALLEST);
        params.add(NestedAggGeoHashGrid.RASTER_FROM_KEY);
        geohashGrid.setParams(params);
        geohashGrid.bucketToRaster("I am not a number!", 5.0);
    }

    // RASTER_FROM_KEY with a terms map: key is looked up in the map.
    @Test
    public void testBucketToRaster_rasterFromStringKey() {
        List params = new ArrayList<>();
        params.add("aggKey");
        params.add("metricKey");
        params.add("valueKey");
        params.add(NestedAggGeoHashGrid.SELECT_SMALLEST);
        params.add(NestedAggGeoHashGrid.RASTER_FROM_KEY);
        params.add("key1:1;key2:2");
        geohashGrid.setParams(params);
        Number rasterValue = geohashGrid.bucketToRaster("key1", 5.0);
        assertEquals(1, rasterValue);
    }

    // Key absent from the terms map is a hard error.
    @Test(expected=IllegalArgumentException.class)
    public void testBucketToRaster_rasterFromStringKey_keyNotInMap() {
        List params = new ArrayList<>();
        params.add("aggKey");
        params.add("metricKey");
        params.add("valueKey");
        params.add(NestedAggGeoHashGrid.SELECT_SMALLEST);
        params.add(NestedAggGeoHashGrid.RASTER_FROM_KEY);
        params.add("key1:1;key2:2");
        geohashGrid.setParams(params);
        geohashGrid.bucketToRaster("key3", 5.0);
    }
}
================================================ FILE: gt-elasticsearch-process/src/test/java/mil/nga/giat/process/elasticsearch/RasterScaleTest.java ================================================
/*
 * This file is
hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */
package mil.nga.giat.process.elasticsearch;

import static org.junit.Assert.*;

import org.junit.Test;

// Unit tests for RasterScale, which rescales raster cell values into a
// configured [scaleMin, scaleMax] output range, optionally applying a
// log transform first. (RasterScale itself is not visible in this chunk;
// behavior is inferred from the assertions below -- confirm against the
// implementation.)
public class RasterScaleTest {

    // No-arg constructor: no output range is configured.
    @Test
    public void testRasterScale_noScale() {
        RasterScale scale = new RasterScale();
        assertFalse(scale.isScaleSet());
    }

    // Max-only constructor: min defaults to 0.
    @Test
    public void testRasterScale_maxProvided() {
        float scaleMax = 10.0f;
        RasterScale scale = new RasterScale(scaleMax);
        assertTrue(scale.isScaleSet());
        assertEquals(0, scale.getScaleMin(), 0.0);
        assertEquals(scaleMax, scale.getScaleMax(), 0.0);
    }

    // Min/max constructor stores both bounds.
    @Test
    public void testRasterScale_minMaxProvided() {
        float scaleMax = 10.0f;
        float scaleMin = 1.0f;
        RasterScale scale = new RasterScale(scaleMin, scaleMax);
        assertTrue(scale.isScaleSet());
        assertEquals(scaleMin, scale.getScaleMin(), 0.0);
        assertEquals(scaleMax, scale.getScaleMax(), 0.0);
    }

    // A degenerate range (min == max) is rejected.
    @Test(expected=IllegalArgumentException.class)
    public void testRasterScale_minMaxSame() {
        float scaleMax = 10.0f;
        new RasterScale(scaleMax, scaleMax);
    }

    // Linear scaling: after observing data 10..40 via prepareScale, values
    // map linearly onto [0, 10] (40 -> 10, 25 -> 5, 10 -> 0).
    @Test
    public void testRasterScale_scaleValue() {
        float scaleMax = 10.0f;
        float scaleMin = 0.0f;
        RasterScale scale = new RasterScale(scaleMin, scaleMax);
        scale.prepareScale(30);
        scale.prepareScale(20);
        scale.prepareScale(10);
        scale.prepareScale(40);
        assertEquals(10, scale.scaleValue(40), 0.0);
        assertEquals(5, scale.scaleValue(25), 0.0);
        assertEquals(0, scale.scaleValue(10), 0.0);
    }

    // With no output range configured, scaleValue is the identity.
    @Test
    public void testRasterScale_scaleValue_emptyScale() {
        RasterScale scale = new RasterScale();
        scale.prepareScale(30);
        scale.prepareScale(20);
        scale.prepareScale(10);
        assertEquals(30, scale.scaleValue(30), 0.0);
        assertEquals(20, scale.scaleValue(20), 0.0);
        assertEquals(10, scale.scaleValue(10), 0.0);
    }

    // When every observed datum is identical, the value maps to scaleMax.
    @Test
    public void testRasterScale_scaleValue_dataMinAndDataMaxAreTheSame() {
        float scaleMax = 10.0f;
        float scaleMin = 1.0f;
        RasterScale scale = new RasterScale(scaleMin, scaleMax);
        scale.prepareScale(30);
        assertEquals(10, scale.scaleValue(30), 0.0);
    }

    // Log-only mode: scaleValue maps 10 -> 1 and 1 -> 0 (consistent with a
    // base-10 log -- TODO confirm against RasterScale).
    @Test
    public void testRasterScale_log() {
        RasterScale scale = new RasterScale(true);
        assertEquals(1, scale.scaleValue(10), 0.0);
        assertEquals(0, scale.scaleValue(1), 0.0);
    }

    // Log and linear scaling combined: log transform first, then map the
    // transformed data range onto [0, 10].
    @Test
    public void testRasterScale_logAndScale() {
        float scaleMax = 10.0f;
        float scaleMin = 0.0f;
        RasterScale scale = new RasterScale(scaleMin, scaleMax, true);
        scale.prepareScale(10);
        scale.prepareScale(1);
        assertEquals(10, scale.scaleValue(10), 0.0);
        assertEquals(0, scale.scaleValue(1), 0.0);
    }
}

================================================ FILE: gt-elasticsearch-process/src/test/java/mil/nga/giat/process/elasticsearch/TestUtil.java ================================================

/*
 * This file is hereby placed into the Public Domain. This means anyone is
 * free to do whatever they wish with this file.
 */
package mil.nga.giat.process.elasticsearch;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.feature.DefaultFeatureCollection;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.opengis.feature.simple.SimpleFeatureType;

// Test fixtures: builds SimpleFeatureCollections and aggregation-bucket maps
// for the GeoHashGrid tests.
//
// NOTE(review): this file was damaged in extraction -- text between '<' and
// '>' characters was eaten, destroying the generic type parameters (e.g.
// "List>" was presumably "List<Map<String, Object>>") and, in createBuckets
// and createAggBucket, entire loop bodies and a method signature. The garbled
// tokens are preserved exactly as found; recover the originals from the
// upstream repository rather than reconstructing them by guesswork.
class TestUtil {

    // Builds a feature collection with an "_aggregation" map attribute and an
    // "aString" attribute, one feature per entry in data.
    public static SimpleFeatureCollection createAggregationFeatures(List> data) {
        final SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
        builder.setName( "testType" );
        builder.add("_aggregation", HashMap.class );
        builder.add("aString", String.class );
        final SimpleFeatureType featureType = builder.buildFeatureType();
        final DefaultFeatureCollection collection = new DefaultFeatureCollection();
        final SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(featureType);
        data.forEach(item -> {
            item.keySet().forEach(key -> featureBuilder.set(key, item.get(key)));
            collection.add(featureBuilder.buildFeature(null));
        });
        return collection;
    }

    // Bucket map carrying a geohash bucket name and a document count, keyed by
    // the GeoHashGrid constants.
    public static Map createDocCountBucket(String bucketName, int docCount) {
        Map bucket = new HashMap<>();
        bucket.put(GeoHashGrid.BUCKET_NAME_KEY, bucketName);
        bucket.put(GeoHashGrid.DOC_COUNT_KEY, docCount);
        return bucket;
    }

    // Doc-count bucket with a nested single-entry metric map ({valueName: value})
    // stored under metricName.
    public static Map createMetricBucket(int docCount, String metricName, String valueName, int value) {
        Map metric = new HashMap<>();
        metric.put(valueName, value);
        Map bucket = createDocCountBucket("grid_cell_name", docCount);
        bucket.put(metricName, metric);
        return bucket;
    }

    // NOTE(review): the body of this loop, the method's return, and the start
    // of the next method's signature were eaten by the extractor (see class
    // comment); the fragment below is preserved verbatim.
    public static List> createBuckets(int[] values) {
        List> buckets = new ArrayList<>();
        for (int i=0; i createAggBucket(String aggName, int[] values) {
        int totalDocCount = 0;
        List> buckets = new ArrayList<>();
        // NOTE(review): this loop body was also eaten by the extractor;
        // presumably it accumulated totalDocCount and populated buckets.
        for (int i=0; i aggResults = new HashMap<>();
        aggResults.put(GeoHashGrid.BUCKETS_KEY, buckets);
        Map bucket = createDocCountBucket("grid_cell_name", totalDocCount);
        bucket.put(aggName, aggResults);
        return bucket;
    }
}

================================================ FILE: joda-shaded/LICENSE.txt ================================================

Apache License Version 2.0, January 2004 http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: joda-shaded/NOTICE.txt ================================================ Elasticsearch Copyright 2009-2017 Elasticsearch This product includes software developed by The Apache Software Foundation (http://www.apache.org/). ================================================ FILE: joda-shaded/pom.xml ================================================ 4.0.0 elasticgeo mil.nga.giat 2.16-SNAPSHOT joda-shaded 2.16-SNAPSHOT jar Elasticsearch Joda Shaded joda-time joda-time ${joda.version} org.apache.maven.plugins maven-dependency-plugin 2.10 unpack process-resources unpack true org.elasticsearch elasticsearch 6.4.3 **/Joda.java,**/FormatDateTimeFormatter.java,**/StrictISODateTimeFormat.java ${project.build.directory}/generated-sources sources org.codehaus.mojo build-helper-maven-plugin 3.0.0 generate-sources add-source ${project.build.directory}/generated-sources maven-shade-plugin 2.3 shade-elasticsearch package shade false org/joda/** org/elasticsearch/** org.joda mil.nga.giat.shaded.joda org.elasticsearch mil.nga.giat.shaded.es false false ================================================ FILE: joda-shaded/src/main/java/org/elasticsearch/common/Strings.java ================================================ /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 * 2017-05 - Reduced to minimal implementation necessary for
 * compiling org.elasticsearch.common.joda.Joda.
 */
package org.elasticsearch.common;

/**
 * Minimal stand-in for Elasticsearch's {@code Strings} utility, providing only
 * the two methods needed to compile the shaded copy of
 * {@code org.elasticsearch.common.joda.Joda}.
 */
public class Strings {

    // Returns true when input is a non-null, non-empty string.
    public static boolean hasLength(String input) {
        return input != null && !input.isEmpty();
    }

    // Splits input on delimiter.
    // NOTE(review): String.split treats its argument as a regular expression
    // and drops trailing empty tokens, whereas the upstream
    // delimitedListToStringArray reportedly splits on the literal delimiter
    // and keeps trailing empties. Likely fine for simple literal delimiters,
    // but verify against the shaded Joda callers before reusing more widely.
    public static String[] delimitedListToStringArray(String input, String delimiter) {
        return input.split(delimiter);
    }
}

================================================ FILE: pom.xml ================================================

4.0.0 mil.nga.giat elasticgeo 2.16-SNAPSHOT pom ElasticGeo ElasticGeo provides a GeoTools data store that allows geospatial features from an Elasticsearch index to be published via OGC services using GeoServer.
https://github.com/ngageoint/elasticgeo GNU Lesser General Public License, Version 2.1 http://www.gnu.org/licenses/lgpl-2.1.html joda-shaded gt-elasticsearch gt-elasticsearch-process gs-web-elasticsearch scm:git:https://github.com/ngageoint/elasticgeo.git HEAD https://github.com/ngageoint/elasticgeo.git UTF-8 UTF-8 22.0 2.16.0 7.4.0 7.4.0 2.9.5 2.10.1 25.1-jre 0.8.2 1.2.17 maven2-repository.dev.java.net Java.net repository http://download.java.net/maven/2 osgeo Open Source Geospatial Foundation Repository http://download.osgeo.org/webdav/geotools/ true boundless Boundless Maven Repository http://repo.boundlessgeo.com/main org.apache.maven.plugins maven-surefire-plugin 2.18.1 java.util.logging.config.file ${project.build.directory}/test-classes/logging.properties ${surefireArgLine} org.apache.maven.plugins maven-release-plugin 2.5.3 true org.eclipse.m2e lifecycle-mapping 1.0.0 org.apache.maven.plugins maven-dependency-plugin [2.10,) unpack org.jacoco jacoco-maven-plugin [${jacoco.version},) prepare-agent org.apache.maven.plugins maven-compiler-plugin 2.5.1 1.8 1.8 org.jacoco jacoco-maven-plugin ${jacoco.version} **/org/joda/** **/org/elasticsearch/** pre-unit-test prepare-agent ${project.build.directory}/coverage-reports/jacoco-ut.exec surefireArgLine post-unit-test test report ${project.build.directory}/coverage-reports/jacoco-ut.exec ${project.reporting.outputDirectory}/jacoco-ut pre-integration-test pre-integration-test prepare-agent ${project.build.directory}/coverage-reports/jacoco-it.exec failsafeArgLine post-integration-test post-integration-test report ${project.build.directory}/coverage-reports/jacoco-it.exec ${project.reporting.outputDirectory}/jacoco-it merge-results verify merge ${project.build.directory}/coverage-reports *.exec ${project.build.directory}/coverage-reports/aggregate.exec post-merge-report verify report ${project.build.directory}/coverage-reports/aggregate.exec ${project.reporting.outputDirectory}/jacoco-aggregate org.eluder.coveralls 
coveralls-maven-plugin 3.1.0 ${project.basedir}/gt-elasticsearch/target/site/jacoco-aggregate/jacoco.xml ${project.basedir}/gt-elasticsearch-process/target/site/jacoco-ut/jacoco.xml