[
  {
    "path": ".gitignore",
    "content": "**/target/\n\n# Mobile Tools for Java (J2ME)\n.mtj.tmp/\n\n# Package Files #\n*.ear\n\n# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml\nhs_err_pid*\n\npom.xml.tag\npom.xml.releaseBackup\npom.xml.versionsBackup\npom.xml.next\nrelease.properties\ndependency-reduced-pom.xml\nbuildNumber.properties\n.DS_Store\n.idea/\n.modules/\napp/app.iml\ncassandra/bigben-cassandra.iml\ncommons/bigben-commons.iml\ncron/cron.iml\nkafka/bigben-kafka.iml\nlib/bigben-lib.iml\n"
  },
  {
    "path": ".looper.yml",
    "content": "tools:\n  jdk: 8\n  maven: 3.5.2\n\ntriggers:\n  - manual: Run default\n  - manual:\n      name: Release Build\n      call: release\n\nflows:\n  default:\n    - call: versionsCheck\n    - call: build\n\n  pr:\n    - echo \"Running build for $GITHUB_PR_URL\"\n    - call: versionsCheck\n    - (name Maven build) mvn -B clean install\n\n  versionsCheck:\n    - (name JDK Version) java -version\n    - (name Maven version) mvn -v\n\n  build:\n   - exposeVars(maven)\n   - (name Project information) echo \"Building ${MAVEN_GROUP_ID}:${MAVEN_ARTIFACT_ID}:${MAVEN_VERSION}\"\n   - (name Maven deploy) mvn -B -DskipTests -Darguments=-DskipTests clean deploy\n\n  release:\n    - call: versionsCheck\n    - (name Maven release) mvn -B -DskipTests -Darguments=-DskipTests clean release:prepare release:perform"
  },
  {
    "path": "LICENSE.txt",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2018 Sandeep Malik\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "README.md",
    "content": "***\n# NOTICE:\n \n## This repository has been archived and is not supported.\n \n[![No Maintenance Intended](http://unmaintained.tech/badge.svg)](http://unmaintained.tech/)\n***\nNOTICE: SUPPORT FOR THIS PROJECT HAS ENDED \n\nThis project was owned and maintained by Walmart. This project has reached its end of life and Walmart no longer supports this project.\n\nWe will no longer be monitoring the issues for this project or reviewing pull requests. You are free to continue using this project under the license terms or forks of this project at your own risk. This project is no longer subject to Walmart's bug bounty program or other security monitoring.\n\n\n## Actions you can take\n\nWe recommend you take the following actions:\n\n  * Review any configuration files used for build automation and make appropriate updates to remove or replace this project\n  * Notify other members of your team and/or organization of this change\n  * Notify your security team to help you evaluate alternative options\n\n## Forking and transition of ownership\n\nFor [security reasons](https://www.theregister.co.uk/2018/11/26/npm_repo_bitcoin_stealer/), Walmart does not transfer the ownership of our primary repos on Github or other platforms to other individuals/organizations. 
Further, we do not transfer ownership of packages for public package management systems.\n\nIf you would like to fork this package and continue development, you should choose a new name for the project and create your own packages, build automation, etc.\n\nPlease review the licensing terms of this project, which continue to be in effect even after decommission.\n\n# BigBen\n`BigBen` is a generic, multi-tenant, time-based event scheduler and cron \nscheduling framework based on `Cassandra` and `Hazelcast`\n\nIt has the following features:\n* **Distributed** - `BigBen` uses a distributed design and can be deployed on 10's or 100's of machines and can be dc-local or cross-dc\n* **Horizontally scalable** - `BigBen` scales linearly with the number of machines. \n* **Fault tolerant** - `BigBen` employs a number of failure protection modes and \ncan withstand arbitrary prolonged down times\n* **Performant** - `BigBen` can easily scale to 10,000's or even millions of event triggers with a \nvery small cluster of machines. It can also easily manage millions of crons running in a distributed manner\n* **Highly Available** - As long as a single machine is available in the cluster, `BigBen` will guarantee the \nexecution of events (albeit with a lower throughput)\n* **Extremely consistent** - `BigBen` employs a single master design (the master itself is highly available with \n`n-1` masters on standby in an `n` cluster machine) to ensure that no two nodes fire the same event or execute \nthe same cron.  
\n* **NoSql based** - `BigBen` comes with a default implementation with `Cassandra` but can be easily extended\nto support other `NoSql` or even `RDBMS` data stores  \n* **Auditable** - `BigBen` keeps track of all the events fired and crons executed with a configurable \nretention \n* **Portable, cloud friendly** - `BigBen` comes as an application bundled as `war` or an embedded lib as `jar`, \nand can be deployed on any cloud, `on-prem` or `public`   \n\n## Use cases\n`BigBen` can be used for a variety of time based workloads, both single trigger based or repeating crons. \nSome of the use cases can be\n* **Delayed execution** - E.g. if a job is to be executed 30 mins from now\n* **System retries** - E.g. if a service A wants to call service B and service B is down at the moment, then \nservice A can schedule an exponential backoff retry strategy with retry intervals of \n1 min, 10 mins, 1 hour, 12 hours, and so on.\n* **Timeout tickers** - E.g. if service A sends a message to service B via `Kafka` and expects a response in 1 min, \nthen it can schedule a `timeout check` event to be executed after 1 min\n* **Polling services** - E.g. 
if service A wants to poll service B at some frequency, it can schedule a cron \nto be executed at some specified frequency\n* **Notification Engine** - `BigBen` can be used to implement `notification engine` with scheduled deliveries, \nscheduled polls, etc\n* **Workflow state machine** - `BigBen` can be used to implement a distributed `workflow` with state suspensions, \nalerts and monitoring of those suspensions.\n\n## Architectural Goals\n`BigBen` was designed to achieve the following goals:\n* Uniformly distributed storage model\n    * Resilient to hot spotting due to sudden surge in traffic\n* Uniform execution load profile in the cluster\n    * Ensure that all nodes have similar load profiles to minimize misfires\n* Linear Horizontal Scaling \n* Lock-free execution\n    * Avoid resource contentions\n* Plugin based architecture to support a variety of databases like `Cassandra, Couchbase, Solr Cloud, Redis, RDBMS`, etc\n* Low maintenance, elastic scaling   \n\n## Design and architecture\nSee the blog published at [Medium](https://medium.com/walmartlabs/an-approach-to-designing-distributed-fault-tolerant-horizontally-scalable-event-scheduler-278c9c380637)\nfor a full description of various design elements of `BigBen`\n\n## Events Inflow\n`BigBen` can receive events in two modes:\n* **kafka** - inbound and outbound Kafka topics to consume event requests and publish event triggers\n* **http** - HTTP APIs to send event requests and HTTP APIs to receive event triggers.\n\n*It is strongly recommended to use `kafka` for better scalability*\n\n### Event Inflow diagram\n![inflow](/docs/assets/inflow.png \"Events Inflow diagram\")\n\n*Request and Response channels can be mixed. For example, the event requests can be sent through HTTP APIs but \nthe event triggers (response) can be received through a Kafka Topic.*\n\n## Event processing guarantees\n`BigBen` has robust event processing guarantees to survive various failures. 
\nHowever, `event-processing` is not the same as `event-acknowledgement`. \n`BigBen` works in a no-acknowledgement mode (*at least for now*). \nOnce an event is triggered, it is either published to `Kafka` or \nsent through an `HTTP API`. Once the `Kafka` producer returns success, or `HTTP API` returns non-500 status code, \nthe event is **assumed** to be processed and marked as such in the system. \nHowever, for whatever reason if the event was not processed and resulted in an error \n(e.g. `Kafka` producer timing out, or `HTTP API` throwing `503`), \nthen the event will be retried multiple times as per the strategies discussed below\n\n### Event misfire strategy\nMultiple scenarios can cause `BigBen` to be unable to trigger an event on time. Such scenarios are called \nmisfires. Some of them are:\n* `BigBen`'s internal components are down during event trigger. \nE.g. \n    * `BigBen`'s data store is down and events could not be fetched\n    * `VMs` are down\n\n* `Kafka` Producer could not publish due to loss of partitions / brokers or any other reasons\n* `HTTP API` returned a 500 error code\n* Any other unexpected failure\n\nIn any of these cases, the event is first retried in memory using an exponential back-off strategy. \n\nThe following parameters control the retry behavior:\n\n* _event.processor.max.retries_ - how many in-memory retries will be made before declaring the event as error, default is 3\n* _event.processor.initial.delay_ - how long in seconds the system should wait before kicking in the retry, default is 1 second\n* _event.processor.backoff.multiplier_ - the back off multiplier factor, default is 2. E.g. the intervals would be 1 second, 2 seconds, 4 seconds.\n\nIf the event still is not processed, then the event is marked as `ERROR`. \nAll the events marked `ERROR` are retried up to a configured limit called `events.backlog.check.limit`. \nThis value can be an arbitrary amount of time, e.g. 1 day, 1 week, or even 1 year. E.g. 
if the limit\nis set at `1 week` then any event failures will be retried for `1 week` after which, they will be permanently \nmarked as `ERROR` and ignored. The `events.backlog.check.limit` can be changed at any time by changing the \nvalue in `bigben.yaml` file and bouncing the servers.\n\n### Event bucketing and shard size\n`BigBen` shards events by minutes. However, since it's not known in advance how many events will be \nscheduled in a given minute, the buckets are further sharded by a pre-defined shard size. The shard size is a \ndesign choice that needs to be made before deployment. Currently, it's not possible to \nchange the shard size once defined. \n\nAn undersized shard value has minimal performance impact, however an oversized shard value may \nkeep some machines idling. The default value of `1000` is good enough for most practical purposes as long as \nthe number of events to be scheduled per minute exceeds `1000 x n`, where `n` is the number of machines in the cluster.\nIf the events to be scheduled are much less than `1000` then a smaller shard size may be chosen.      \n\n### Multi shard parallel processing\nEach bucket with all its shards is distributed across the cluster for execution with an algorithm that ensures a \nrandom and uniform distribution. The following diagram shows the execution flow.  \n![shard design](https://cdn-images-1.medium.com/max/1600/1*euaHLOnw6G96SigfXxWhtA.png \"BigBen processing flow\")\n\n### Multi-tenancy\nMultiple tenants can use `BigBen` in parallel. Each one can configure how the events will be delivered once triggered.\nTenant 1 can configure the events to be delivered in `kafka` topic `t1`, whereas tenant 2 can have them delivered\nvia a specific `http` url. The usage of tenants will become clearer with the below explanation of `BigBen` APIs\n\n## Docker support\nBigBen is dockerized and image (`bigben`) is available on docker hub. 
The code also contains \nscripts, which start `cassandra`, `hazelcast` and `app`.\nTo quickly set up the application for local dev testing, do the following steps:\n1. `git clone $repo`\n2. `cd bigben/build/docker`\n3. execute `./docker_build.sh`\n4. start cassandra container by executing `./cassandra_run.sh`\n5. start app by executing `./app_run.sh`\n6. To run multiple app nodes `export NUM_INSTANCES=3 && ./app_run.sh`\n7. wait for application to start on port `8080`\n8. verify that `curl http://localhost:8080/ping` returns `200`\n9. Use `./cleanup.sh` to stop and remove all `BigBen` related containers \n\n## Non-docker execution\n`BigBen` can be run without docker as well. Following are the steps\n1. `git clone $repo`\n2. `cd bigben/build/exec`\n3. execute `./build.sh`\n4. execute `./app_run.sh`\n\n## Env properties\nYou can set the following environment properties\n1. `APP_CONTAINER_NAME` (default bigben_app)\n2. `SERVER_PORT` (default 8080)\n3. `HZ_PORT` (default 5701)\n4. `NUM_INSTANCES` (default 1)\n5. `LOGS_DIR` (default bigben/../bigben_logs)\n6. `CASSANDRA_SEED_IPS` (default $HOST_IP)\n7. `HZ_MEMBER_IPS` (default $HOST_IP)\n8. `JAVA_OPTS`\n\n# How to override default config values?\n`BigBen` employs an extensive override system to allow someone to override \nthe default properties. The order of priority is system properties > system env variables >\noverrides > defaults\nThe overrides can be defined in `config/overrides.yaml` file.\nThe `log4j.xml` can also be changed to change log behavior without \nrecompiling binaries\n\n## How to set up `Cassandra` for `BigBen`?\nFollowing are the steps to set up `Cassandra`:\n1. git clone the `master` branch\n2. Set up a Cassandra cluster\n3. create a keyspace `bigben` in `Cassandra` cluster with desired replication\n4. 
Open the file `bigben-schema.cql` and execute `cqlsh -f bigben-schema.cql`\n\n## APIs\n\n### cluster\n`GET /events/cluster`\n* response sample (a 3 node cluster running on single machine and three different ports (5701, 5702, 5703)):\n```json\n{\n    \"[127.0.0.1]:5702\": \"Master\",\n    \"[127.0.0.1]:5701\": \"Slave\",\n    \"[127.0.0.1]:5703\": \"Slave\"\n}\n``` \nThe node marked `Master` is the master node that does the scheduling.\n\n### tenant registration\nA tenant can be registered by calling the following API\n\n`POST /events/tenant/register`\n* payload schema\n```json\n{\n  \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n  \"type\": \"object\",\n  \"properties\": {\n    \"tenant\": {\n      \"type\": \"string\"\n    },\n    \"type\": {\n      \"type\": \"string\"\n    },\n    \"props\": {\n      \"type\": \"object\"\n    }\n  },\n  \"required\": [\n    \"tenant\",\n    \"type\",\n    \"props\"\n  ]\n}\n```\n* `tenant` - specifies a tenant and can be any arbitrary value.\n* `type` - specifies the type of `tenant`. One of the three types can be used\n    * MESSAGING - specifies that `tenant` wants events delivered via a messaging queue. Currently, `kafka` \n    is the only supported messaging system.\n    * HTTP - specifies that `tenant` wants events delivered via an http callback URL. \n    * CUSTOM_CLASS - specifies a custom event processor implemented for custom processing of events\n* `props` - A bag of properties needed for each type of tenant. 
\n\n* kafka sample:\n```json\n{\n    \"tenant\": \"TenantA/ProgramB/EnvC\",\n    \"type\": \"MESSAGING\",\n    \"props\": {\n        \"topic\": \"some topic name\",\n        \"bootstrap.servers\": \"node1:9092,node2:9092\"\n    }\n}\n```\n* http sample\n```json\n{\n     \"tenant\": \"TenantB/ProgramB/EnvC\",\n     \"type\": \"HTTP\",\n     \"props\": {\n          \"url\": \"http://someurl\",\n          \"headers\": {\n            \"header1\": \"value1\",\n            \"header2\": \"value2\"\n          }\n     }\n}\n```     \n\n### fetch all tenants:\n`GET /events/tenants`\n\n### event scheduling\n`POST /events/schedule`\n\n`Payload - List<EventRequest>`\n\n`EventRequest` schema:\n \n```json\n{\n  \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n  \"type\": \"object\",\n  \"properties\": {\n    \"id\": {\n      \"type\": \"string\"\n    },\n    \"eventTime\": {\n      \"type\": \"string\",\n      \"description\": \"An ISO-8601 formatted timestamp e.g. 2018-01-31T04:00.00Z\"\n    },\n    \"tenant\": {\n      \"type\": \"string\"\n    },\n    \"payload\": {\n      \"type\": \"string\",\n      \"description\": \"an optional event payload, must NOT be null with deliveryOption = PAYLOAD_ONLY\"\n    },\n    \"mode\": { \n      \"type\": \"string\",\n      \"enum\": [\"UPSERT\", \"REMOVE\"],\n      \"default\": \"UPSERT\",\n      \"description\": \"Use REMOVE to delete an event, UPSERT to add/update an event\"\n    },\n    \"deliveryOption\": {\n      \"type\": \"string\",\n      \"enum\": [\"FULL_EVENT\", \"PAYLOAD_ONLY\"],\n      \"default\": \"FULL_EVENT\",\n      \"description\": \"Use FULL_EVENT to have full event delivered via kafka/http, PAYLOAD_ONLY to have only the payload delivered\"\n    }\n  },\n  \"required\": [\n    \"id\",\n    \"eventTime\",\n    \"tenant\"\n  ]\n}\n```\n\n### find an event\n`GET /events/find?id=?&tenant=?`\n\n### dry run\n`POST /events/dryrun?id=?&tenant=?`\n\nfires an event without changing its final status\n\n## cron 
APIs\ncoming up...\n        \n    \n    \n"
  },
  {
    "path": "app/LICENSE.txt",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2018 Sandeep Malik\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "app/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n    <parent>\n        <groupId>com.walmartlabs.bigben</groupId>\n        <artifactId>bigben</artifactId>\n        <version>1.0.7-SNAPSHOT</version>\n    </parent>\n    <artifactId>bigben-app</artifactId>\n    <name>BigBen:app</name>\n    <url>http://maven.apache.org</url>\n    <packaging>takari-jar</packaging>\n    <properties>\n        <slf4j-api.version>1.7.25</slf4j-api.version>\n    </properties>\n    <dependencies>\n        <dependency>\n            <groupId>com.walmartlabs.bigben</groupId>\n            <artifactId>bigben-lib</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.walmartlabs.bigben</groupId>\n            <artifactId>bigben-cassandra</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.walmartlabs.bigben</groupId>\n            <artifactId>bigben-kafka</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.walmartlabs.bigben</groupId>\n            <artifactId>bigben-cron</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-log4j12</artifactId>\n            <version>${slf4j-api.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>io.ktor</groupId>\n            <artifactId>ktor-server-core</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>io.ktor</groupId>\n            <artifactId>ktor-server-netty</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>io.netty</groupId>\n            <artifactId>netty-codec-http2</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>io.ktor</groupId>\n            
<artifactId>ktor-jackson</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>io.ktor</groupId>\n            <artifactId>ktor-client-core</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>io.ktor</groupId>\n            <artifactId>ktor-client-apache</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.testng</groupId>\n            <artifactId>testng</artifactId>\n            <scope>test</scope>\n        </dependency>\n    </dependencies>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.jetbrains.kotlin</groupId>\n                <artifactId>kotlin-maven-plugin</artifactId>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-surefire-plugin</artifactId>\n                <configuration>\n                    <systemPropertyVariables>\n                        <buildDirectory>${project.build.directory}</buildDirectory>\n                    </systemPropertyVariables>\n                </configuration>\n            </plugin>\n            <plugin>\n                <artifactId>maven-resources-plugin</artifactId>\n                <configuration>\n                    <skip>false</skip>\n                </configuration>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-assembly-plugin</artifactId>\n                <version>3.1.0</version>\n                <configuration>\n                    <finalName>bigben</finalName>\n                    <appendAssemblyId>false</appendAssemblyId>\n                    <descriptorRefs>\n                        <descriptorRef>jar-with-dependencies</descriptorRef>\n                    </descriptorRefs>\n                    <archive>\n                        <manifest>\n              
              <addClasspath>true</addClasspath>\n                            <mainClass>com.walmartlabs.bigben.app.RunKt</mainClass>\n                        </manifest>\n                    </archive>\n                </configuration>\n                <executions>\n                    <execution>\n                        <id>assemble-all</id>\n                        <phase>package</phase>\n                        <goals>\n                            <goal>single</goal>\n                        </goals>\n                    </execution>\n                </executions>\n            </plugin>\n        </plugins>\n    </build>\n</project>\n"
  },
  {
    "path": "app/src/main/kotlin/com/walmartlabs/bigben/app/app.kt",
    "content": "/*-\n * #%L\n * BigBen:app\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.app\n\n/**\n * Created by smalik3 on 2/28/18\n */\nimport com.walmartlabs.bigben.BigBen\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.api.EventReceiver\nimport com.walmartlabs.bigben.entities.EventRequest\nimport com.walmartlabs.bigben.entities.EventStatus.REJECTED\nimport com.walmartlabs.bigben.extns.bucket\nimport com.walmartlabs.bigben.extns.nowUTC\nimport com.walmartlabs.bigben.utils.*\nimport com.walmartlabs.bigben.utils.commons.Module\nimport com.walmartlabs.bigben.utils.commons.ModuleRegistry\nimport org.slf4j.Logger\nimport java.time.Duration\nimport java.time.ZonedDateTime\nimport java.time.temporal.ChronoUnit\nimport java.util.*\nimport java.util.concurrent.ThreadLocalRandom\nimport java.util.concurrent.TimeUnit\nimport kotlin.system.exitProcess\n\nclass App {\n\n    init {\n        try {\n            val lifecycle =\n                typeRefYaml<Map<String, String?>>(App::class.java.classLoader.getResource(\"bigben-lifecycle.yaml\").readText())\n            initPhase(\"pre-init\", lifecycle, null)\n            val l = logger<App>()\n            l.info(\"phase:pre-init finished\")\n            println(\n                \"\\n\" +\n                     
   \"  ____  _       ____             \\n\" +\n                        \" |  _ \\\\(_)     |  _ \\\\            \\n\" +\n                        \" | |_) |_  __ _| |_) | ___ _ __  \\n\" +\n                        \" |  _ <| |/ _` |  _ < / _ \\\\ '_ \\\\ \\n\" +\n                        \" | |_) | | (_| | |_) |  __/ | | |\\n\" +\n                        \" |____/|_|\\\\__, |____/ \\\\___|_| |_|\\n\" +\n                        \"           __/ |                 \\n\" +\n                        \"          |___/                  \\n\"\n            )\n            BigBen.init()\n            initPhase(\"post-init\", lifecycle, l)\n            l.info(\"Bigben => successfully started\")\n        } catch (e: Exception) {\n            try {\n                val l: Logger = logger<App>()\n                l.error(\"Bigben:error => unknown error, system will exit\", e.rootCause()!!)\n            } catch (ignore: Exception) {\n            }\n            exitProcess(1)\n        }\n    }\n\n    private fun initPhase(phase: String, lifecycle: Map<String, String?>, l: Logger?) 
{\n        l?.info(\"phase:$phase started\")\n        lifecycle[\"$phase-class\"]?.run { (Class.forName(this).newInstance() as Module).init(ModuleRegistry()) }\n            ?: lifecycle[\"$phase-object\"]?.run {\n                (Class.forName(this).getDeclaredField(\"INSTANCE\").apply {\n                    isAccessible = true\n                }.get(null) as Module).init(ModuleRegistry())\n            }\n        l?.info(\"phase:$phase finished\")\n    }\n}\n\nobject EventGenerator {\n\n    data class EventGeneration(val offset: String, val period: String, val numEvents: Int, val tenant: String)\n\n    private val l = logger<EventGenerator>()\n\n    fun generateEvents(eg: EventGeneration): Map<ZonedDateTime, Int> {\n        val random = ThreadLocalRandom.current()\n        val t1 = nowUTC().bucket() + Duration.parse(eg.offset)\n        val t2 = t1 + Duration.parse(eg.period)\n        val delta = ChronoUnit.MILLIS.between(t1, t2)\n        l.info(\"generating ${eg.numEvents} random events between $t1 and $t2\")\n        return (1..eg.numEvents).map {\n            val t = if(delta > 0) t1.plus(random.nextLong(delta), ChronoUnit.MILLIS) else t1\n            module<EventReceiver>().addEvent(EventRequest().also {\n                it.tenant = eg.tenant\n                it.eventTime = t.toString()\n                it.id = UUID.randomUUID().toString()\n            }).transform { if (it?.eventStatus == REJECTED) throw IllegalArgumentException(it.error?.message) else it }\n        }.reduce().transform {\n            it!!.groupBy { ZonedDateTime.parse(it!!.eventTime).bucket() }.mapValues { it.value.size }.toSortedMap()\n        }.get(30L, TimeUnit.MINUTES)\n    }\n}\n"
  },
  {
    "path": "app/src/main/kotlin/com/walmartlabs/bigben/app/run.kt",
    "content": "package com.walmartlabs.bigben.app\n\nimport com.fasterxml.jackson.databind.SerializationFeature.INDENT_OUTPUT\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.api.EventService\nimport com.walmartlabs.bigben.cron.CronService\nimport com.walmartlabs.bigben.extns.APIResponse\nimport com.walmartlabs.bigben.utils.stackTraceAsString\nimport com.walmartlabs.bigben.utils.typeRefJson\nimport io.ktor.application.Application\nimport io.ktor.application.ApplicationCall\nimport io.ktor.application.call\nimport io.ktor.application.install\nimport io.ktor.features.ContentNegotiation\nimport io.ktor.features.StatusPages\nimport io.ktor.http.HttpStatusCode\nimport io.ktor.http.HttpStatusCode.Companion.BadRequest\nimport io.ktor.http.HttpStatusCode.Companion.InternalServerError\nimport io.ktor.jackson.jackson\nimport io.ktor.request.receive\nimport io.ktor.response.header\nimport io.ktor.response.respond\nimport io.ktor.routing.*\nimport io.ktor.server.netty.EngineMain\nimport org.apache.commons.text.StrLookup\nimport org.apache.commons.text.StrSubstitutor\nimport org.apache.log4j.xml.DOMConfigurator\nimport java.io.File\n\nfun main(args: Array<String>) = EngineMain.main(args)\n\nfun logs() {\n    System.getProperty(\"bigben.log.config\")?.run {\n        val logFile = File(this)\n        if (logFile.exists()) {\n            println(\"configuring logger\")\n            StrSubstitutor(StrLookup.systemPropertiesLookup()).run {\n                logFile.readLines().map { replace(it) }\n            }.joinToString(\"\\n\").run {\n                File(System.getProperty(\"java.io.tmpdir\"), \"log4j-overrides-substituted.xml\").let {\n                    println(\"using log file from ${it.absolutePath}\")\n                    it.writeText(this)\n                    DOMConfigurator.configure(it.toURI().toURL())\n                }\n            }\n        }\n    }\n}\n\nfun app() = App()\n\nfun Application.routes() {\n    routing {\n        
get(\"/ping\") { call.respond(mapOf(\"status\" to \"OK\")) }\n        route(\"/events\") {\n            val es = module<EventService>()\n            get(\"/cluster\") { call.fromAPIResponse(es.clusterStats()) }\n            post(\"/schedule\") { call.fromAPIResponse(es.schedule(typeRefJson(call.receive()))) }\n            post(\"/tenant/register\") { call.fromAPIResponse(es.registerProcessor(call.receive())) }\n            get(\"/tenants\") { call.fromAPIResponse(es.registeredTenants()) }\n            get(\"/find\") { call.fromAPIResponse(es.find(call.request.queryParameters[\"id\"]!!, call.request.queryParameters[\"tenant\"]!!)) }\n            post(\"/dryrun\") { call.fromAPIResponse(es.dryrun(call.request.queryParameters[\"id\"]!!, call.request.queryParameters[\"tenant\"]!!)) }\n        }\n        post(\"/generation/random\") { call.respond(EventGenerator.generateEvents(call.receive())) }\n        route(\"/cron\") {\n            post { call.fromAPIResponse(CronService.upsert(call.receive())) }\n            get(\"/describe\") { call.fromAPIResponse(CronService.describe(call.receive())) }\n            get(\"/{tenant}/{id}\") {\n                call.fromAPIResponse(\n                    CronService.get(\n                        call.parameters[\"tenant\"]!!, call.parameters[\"id\"]!!,\n                        call.request.queryParameters[\"describe\"]?.toBoolean()\n                    )\n                )\n            }\n            delete(\"/{tenant}/{id}/{type}\") {\n                call.fromAPIResponse(CronService.delete(call.parameters[\"tenant\"]!!, call.parameters[\"id\"]!!, call.parameters[\"type\"]!!))\n            }\n        }\n    }\n}\n\nfun Application.configure() {\n    install(ContentNegotiation) {\n        jackson { enable(INDENT_OUTPUT) }\n    }\n    install(StatusPages) {\n        exception<IllegalArgumentException> { e ->\n            call.response.status(BadRequest)\n            call.respond(mapOf(\"message\" to (e.message ?: \"\")))\n        }\n  
      exception<Throwable> { e ->\n            call.response.status(InternalServerError)\n            if (call.request.queryParameters[\"debug\"] != null) {\n                call.respond(mapOf(\"message\" to ((e.message ?: \"\")), \"stacktrace\" to e.stackTraceAsString()))\n            } else call.respond(mapOf(\"message\" to (e.message ?: \"\")))\n        }\n    }\n}\n\nprivate suspend fun ApplicationCall.fromAPIResponse(r: APIResponse) {\n    r.headers.forEach { h -> h.value.forEach { response.header(h.key, it) } }\n    response.status(HttpStatusCode.fromValue(r.status))\n    respond(r.entity)\n}"
  },
  {
    "path": "app/src/main/resources/application.conf",
    "content": "ktor {\n  deployment {\n    port = 8080\n    port = ${?app.server.port}\n  }\n  application {\n    modules = [\n      com.walmartlabs.bigben.app.RunKt.logs\n      com.walmartlabs.bigben.app.RunKt.configure\n      com.walmartlabs.bigben.app.RunKt.app\n      com.walmartlabs.bigben.app.RunKt.routes\n    ]\n  }\n}"
  },
  {
    "path": "app/src/main/resources/bigben-lifecycle.yaml",
    "content": "pre-init-class: null\npost-init-class: null"
  },
  {
    "path": "app/src/main/resources/bigben.yaml",
    "content": "# top level modules\nmodules:\n  - name: domain\n    class: com.walmartlabs.bigben.providers.domain.cassandra.CassandraModule\n  - name: processors\n    object: com.walmartlabs.bigben.processors.ProcessorRegistry\n  - name: hz\n    class: com.walmartlabs.bigben.utils.hz.Hz\n  - name: scheduler\n    object: com.walmartlabs.bigben.SchedulerModule\n  - name: events\n    object: com.walmartlabs.bigben.EventModule\n  - name: messaging\n    object: com.walmartlabs.bigben.kafka.KafkaModule\n    enabled: ${kafka.module.enabled:-false}\n  - name: cron\n    object: com.walmartlabs.bigben.cron.CronRunner\n    enabled: ${cron.module.enabled:-false}\n\n# hazelcast properties\nhz:\n  template: file://hz.template.xml\n  group:\n    name: bigben-dev\n    password: bigben-dev\n  network:\n    autoIncrementPort: true\n    members: 127.0.0.1\n    port: 5701\n  map:\n    store:\n      writeDelay: 30\n\n# message related properties\nmessaging.producer.factory.class: com.walmartlabs.bigben.kafka.KafkaMessageProducerFactory\n\n# cassandra related properties\ncassandra:\n  keyspace: bigben\n  cluster:\n    contactPoints: 127.0.0.1\n    clusterName: bigben-cluster\n    port: 9042\n    localDataCenter: null\n    coreConnectionsPerLocalHost: 1\n    maxConnectionsPerLocalHost: 1\n    coreConnectionsPerRemoteHost: 1\n    maxConnectionsPerRemoteHost: 1\n    maxRequestsPerLocalConnection: 32768\n    maxRequestsPerRemoteConnection: 2048\n    newLocalConnectionThreshold: 3000\n    newRemoteConnectionThreshold: 400\n    poolTimeoutMillis: 0\n    keepTCPConnectionAlive: true\n    connectionTimeOut: 5000\n    readTimeout: 12000\n    reconnectPeriod: 5\n    username: null\n    password: null\n    downgradingConsistency: false\n    writeConsistency: LOCAL_ONE\n    readConsistency: LOCAL_ONE\n\n# kafka consumer properties\nkafka:\n  consumers:\n    - num.consumers: ${num.consumers:-8}\n      processor.impl.class: com.walmartlabs.bigben.kafka.ProcessorImpl\n      topics: 
${bigben.inbound.topic.name:-null}\n      max.poll.wait.time: ${max.poll.wait.time:-10000}\n      message.retry.max.count: ${message.retry.max.count:-10}\n      config:\n        key.deserializer: org.apache.kafka.common.serialization.StringDeserializer\n        value.deserializer: org.apache.kafka.common.serialization.StringDeserializer\n        bootstrap.servers: ${bigben.inbound.topic.bootstrap.servers:-null}\n        #fetch.min.bytes: 1\n        group.id: ${group.id:-bigben-inbound}\n        heartbeat.interval.ms: ${heartbeat.interval.ms:-3000}\n        session.timeout.ms: 30000\n        auto.offset.reset: ${auto.offset.reset:-latest}\n        fetch.max.bytes: 324000\n        max.poll.interval.ms: 30000\n        max.poll.records: 100\n        receive.buffer.bytes: 65536\n        request.timeout.ms: 60000\n        #send.buffer.bytes: 131072\n        enable.auto.commit: ${enable.auto.commit:-false}\n  producer:\n    config: # this is default kafka producer config, these values will be used if not supplied during the tenant registration\n      key.serializer: org.apache.kafka.common.serialization.StringSerializer\n      value.serializer: org.apache.kafka.common.serialization.StringSerializer\n      acks: \"1\"\n      buffer.memory: 32400\n      retries: 3\n\n# system properties\ntask:\n  executor:\n    #retry.thread.count: 8\n    retry.time.units: SECONDS\n    delay: 1\n    max.retries: 3\n    backoff.multiplier: 2\n\napp.server.port: 8080\ngeneric.future.max.get.time: 60\n\nevents:\n  scheduler.enabled: true\n  schedule.scan.interval.minutes: 1\n  num.shard.submitters: 8\n  receiver:\n    shard.size: 1000\n    lapse.offset.minutes: 0\n    delete:\n      max.retries: 3\n      initial.delay: 1\n      backoff.multiplier: 1\n  submit:\n    initial.delay: 1\n    backoff.multiplier: 1\n    max.retries: 3\n  processor:\n    max.retries: 3\n    initial.delay: 1\n    backoff.multiplier: 2\n    eager.loading: true\n  tasks:\n    max.events.in.memory: 100000\n    
scheduler.worker.threads: 8\n\n# bucket manager / loader related properties\nbuckets:\n  backlog.check.limit: 1440 # 1 Day\n  background:\n    load.fetch.size: 100\n    load.wait.interval.seconds: 15\n\ncron:\n  runner:\n    core.pool.size: 8\n  load:\n    max.retries: 10\n    delay: 1\n    backoff.multiplier: 1\n    time.units: \"SECONDS\""
  },
  {
    "path": "app/src/main/resources/log4j.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!--\n  #%L\n  BigBen:app\n  =======================================\n  Copyright (C) 2016 - 2018 Walmart Inc.\n  =======================================\n  Licensed under the Apache License, Version 2.0 (the \"License\");\n  you may not use this file except in compliance with the License.\n  You may obtain a copy of the License at\n  \n       http://www.apache.org/licenses/LICENSE-2.0\n  \n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n  #L%\n  -->\n\n<!DOCTYPE log4j:configuration SYSTEM \"log4j.dtd\">\n<log4j:configuration debug=\"true\"\n                     xmlns:log4j='http://jakarta.apache.org/log4j/'\n                     xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n                     xsi:schemaLocation=\"http://jakarta.apache.org/log4j/ \">\n\n    <appender name=\"console\" class=\"org.apache.log4j.ConsoleAppender\">\n        <layout class=\"org.apache.log4j.PatternLayout\">\n            <param name=\"ConversionPattern\"\n                   value=\"%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%t] %c{1}:%L - %m%n\"/>\n        </layout>\n    </appender>\n\n    <root>\n        <level value=\"WARN\"/>\n        <appender-ref ref=\"console\"/>\n    </root>\n\n    <logger name=\"com.walmartlabs.bigben\" additivity=\"false\">\n        <level value=\"INFO\"/>\n        <appender-ref ref=\"console\"/>\n    </logger>\n\n</log4j:configuration>\n"
  },
  {
    "path": "app/src/test/kotlin/com/walmartlabs/bigben/tests/APITests.kt",
    "content": "package com.walmartlabs.bigben.tests\n\nimport com.datastax.driver.core.Session\nimport com.fasterxml.jackson.databind.SerializationFeature.INDENT_OUTPUT\nimport com.walmartlabs.bigben.BigBen\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.app.EventGenerator\nimport com.walmartlabs.bigben.app.main\nimport com.walmartlabs.bigben.entities.EntityProvider\nimport com.walmartlabs.bigben.entities.EventDeliveryOption.FULL_EVENT\nimport com.walmartlabs.bigben.entities.EventDeliveryOption.PAYLOAD_ONLY\nimport com.walmartlabs.bigben.entities.EventLoader\nimport com.walmartlabs.bigben.entities.EventRequest\nimport com.walmartlabs.bigben.entities.EventResponse\nimport com.walmartlabs.bigben.entities.EventStatus.*\nimport com.walmartlabs.bigben.extns.nowUTC\nimport com.walmartlabs.bigben.kafka.MockMessageProducerFactory\nimport com.walmartlabs.bigben.processors.ProcessorConfig\nimport com.walmartlabs.bigben.processors.ProcessorConfig.Type.*\nimport com.walmartlabs.bigben.utils.fromJson\nimport com.walmartlabs.bigben.utils.json\nimport com.walmartlabs.bigben.utils.stackTraceAsString\nimport com.walmartlabs.bigben.utils.typeRefJson\nimport io.ktor.application.call\nimport io.ktor.application.install\nimport io.ktor.client.HttpClient\nimport io.ktor.client.call.call\nimport io.ktor.client.engine.apache.Apache\nimport io.ktor.client.request.accept\nimport io.ktor.client.request.post\nimport io.ktor.client.request.url\nimport io.ktor.client.response.readText\nimport io.ktor.content.TextContent\nimport io.ktor.features.ContentNegotiation\nimport io.ktor.features.StatusPages\nimport io.ktor.http.ContentType.Application.Json\nimport io.ktor.http.HttpMethod\nimport io.ktor.http.HttpMethod.Companion.Get\nimport io.ktor.http.HttpMethod.Companion.Post\nimport io.ktor.http.HttpStatusCode\nimport io.ktor.http.HttpStatusCode.Companion.OK\nimport io.ktor.jackson.jackson\nimport io.ktor.request.contentType\nimport io.ktor.request.header\nimport 
io.ktor.request.receive\nimport io.ktor.response.respond\nimport io.ktor.routing.post\nimport io.ktor.routing.routing\nimport io.ktor.server.engine.embeddedServer\nimport io.ktor.server.netty.Netty\nimport kotlinx.coroutines.runBlocking\nimport org.testng.annotations.AfterClass\nimport org.testng.annotations.BeforeClass\nimport org.testng.annotations.Test\nimport java.time.ZonedDateTime\nimport java.util.UUID.randomUUID\nimport java.util.concurrent.ConcurrentHashMap\nimport java.util.concurrent.SynchronousQueue\nimport kotlin.concurrent.thread\nimport kotlin.test.assertEquals\nimport kotlin.test.assertFalse\nimport kotlin.test.assertTrue\n\nclass APITests {\n\n    companion object {\n        init {\n            System.setProperty(\"bigben.configs\", \"file://bigben-api-test.yaml\")\n            thread { main(emptyArray()) }\n            BigBen.init()\n        }\n    }\n\n    private val client = HttpClient(Apache)\n    private val server = \"http://localhost:8080\"\n\n    private val responses = ConcurrentHashMap<String, SynchronousQueue<EventResponse>>()\n    private val payloadResponse = SynchronousQueue<String>()\n\n    @BeforeClass\n    private fun `clean up db`() {\n        println(\"cleaning up the db\")\n        (module<EntityProvider<Any>>().unwrap() as Session).apply {\n            execute(\"truncate bigben.events;\")\n            execute(\"truncate bigben.lookups;\")\n            execute(\"truncate bigben.buckets;\")\n            execute(\"truncate bigben.kv_table;\")\n        }\n        thread {\n            embeddedServer(Netty, 9090) {\n                println(\"starting test server\")\n                install(ContentNegotiation) {\n                    jackson { enable(INDENT_OUTPUT) }\n                }\n                install(StatusPages) {\n                    exception<Throwable> { e ->\n                        //e.printStackTrace()\n                        call.response.status(HttpStatusCode.InternalServerError)\n                        
call.respond(mapOf(\"message\" to ((e.message ?: \"\")), \"stacktrace\" to e.stackTraceAsString()))\n                    }\n                }\n                routing {\n                    post(\"/test\") {\n                        if (call.request.queryParameters[\"error\"] != null) {\n                            throw IllegalArgumentException(\"test error\")\n                        }\n                        val er = call.receive<EventResponse>()\n                        assertEquals(call.request.header(\"header1\"), \"value1\")\n                        assertEquals(call.request.header(\"header2\"), \"value2\")\n                        assertEquals(call.request.contentType().contentType, Json.contentType)\n                        assertEquals(call.request.contentType().contentSubtype, Json.contentSubtype)\n                        responses[er.id!!]!!.put(er)\n                        call.respond(OK, mapOf(\"status\" to \"OK\"))\n                    }\n                    post(\"/payload\") {\n                        if (call.request.queryParameters[\"error\"] != null) {\n                            throw IllegalArgumentException(\"test error\")\n                        }\n                        val payload = call.receive<String>()\n                        assertEquals(call.request.header(\"header1\"), \"value1\")\n                        assertEquals(call.request.header(\"header2\"), \"value2\")\n                        assertEquals(call.request.contentType().contentType, Json.contentType)\n                        assertEquals(call.request.contentType().contentSubtype, Json.contentSubtype)\n                        payloadResponse.put(payload)\n                        call.respond(OK, mapOf(\"status\" to \"OK\"))\n                    }\n                }\n            }.start(true)\n        }\n    }\n\n    @AfterClass\n    fun teardown() {\n        client.close()\n    }\n\n    @Test(enabled = true)\n    fun `test events at the same time`() {\n        val tenant 
= \"test\"\n\n        assertEquals(runBlocking {\n            client.call {\n                url(\"$server/events/tenant/register\")\n                accept(Json)\n                method = Post\n                body = TextContent(\n                    ProcessorConfig(\n                        tenant, CUSTOM_CLASS,\n                        mapOf(\"eventProcessorClass\" to \"com.walmartlabs.bigben.processors.NoOpCustomClassProcessor\")\n                    ).json(), Json\n                )\n            }.response.status.value\n        }, 200)\n\n        // schedule 1000 events at exactly same time at the start of the minute:\n        runBlocking {\n            client.post<String> {\n                url(\"$server/generation/random\")\n                accept(Json)\n                body = TextContent(EventGenerator.EventGeneration(\"PT1M\", \"PT0S\", 1000, tenant).json(), Json)\n            }\n        }\n\n        // schedule 1000 events at exactly same time at the start of the minute + 30 seconds:\n        val bucket = runBlocking {\n            client.post<String> {\n                url(\"$server/generation/random\")\n                accept(Json)\n                body = TextContent(EventGenerator.EventGeneration(\"PT1M30S\", \"PT0S\", 1000, tenant).json(), Json)\n            }\n        }.run { typeRefJson<Map<String, Int>>(this).run { ZonedDateTime.parse(entries.first().key) } }\n\n        Thread.sleep(2 * 60 * 1000) // sleep for 2 minutes\n\n        var total = 0\n        (0..1).forEach {\n            // 2000 events -> 2 shards\n            var l = module<EventLoader>().load(bucket, 0, 400).get()\n            while (l.second.isNotEmpty()) {\n                l.second.forEach {\n                    assertEquals(it.bucketId, bucket)\n                    assertEquals(it.status, PROCESSED)\n                    assertTrue { it.eventTime == bucket || it.eventTime == bucket.plusSeconds(30) }\n                    total++\n                }\n                l = 
module<EventLoader>().load(bucket, 0, 400, l.second.last().eventTime!!, l.second.last().id!!, l.first)\n                    .get()\n            }\n        }\n    }\n\n    @Test\n    fun `test unknown tenant rejection`() {\n        val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(\"id123\", nowUTC().toString(), \"ABC\")))\n        assertEquals(status, 400)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, REJECTED)\n    }\n\n    @Test\n    fun `test missing tenant`() {\n        val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(\"id123\", nowUTC().toString())))\n        assertEquals(status, 400)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, REJECTED)\n    }\n\n    @Test\n    fun `test missing event time`() {\n        val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(\"id123\", tenant = \"ABC\")))\n        assertEquals(status, 400)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, REJECTED)\n    }\n\n    @Test\n    fun `test event time wrong format`() {\n        val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(\"id123\", \"time\", tenant = \"ABC\")))\n        assertEquals(status, 400)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, REJECTED)\n    }\n\n    @Test\n    fun `test event time in past`() {\n        val tenant = \"http\"\n        assertEquals(\n            client.call(\n                \"/events/tenant/register\", ProcessorConfig(\n                    tenant, HTTP, mapOf(\n                        \"url\" to \"http://localhost:9090/test\", \"headers\" to mapOf(\"header1\" to \"value1\", \"header2\" to \"value2\")\n                    )\n                )\n            ).first, 200\n        )\n\n        val eventId = \"id123\"\n        responses[eventId] = SynchronousQueue()\n        val (status, content) = 
client.call(\"/events/schedule\", listOf(EventRequest(eventId, nowUTC().minusMinutes(1).toString(), tenant)))\n        assertEquals(status, 200)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, TRIGGERED)\n        val er = responses[eventId]!!.take()\n        assertEquals(er.eventStatus, TRIGGERED)\n        assertEquals(er.deliveryOption, FULL_EVENT)\n        assertEquals(er.tenant!!, tenant)\n    }\n\n    @Test\n    fun `test event time in past - payload only`() {\n        val tenant = \"http3\"\n        assertEquals(\n            client.call(\n                \"/events/tenant/register\", ProcessorConfig(\n                    tenant, HTTP, mapOf(\n                        \"url\" to \"http://localhost:9090/payload\", \"headers\" to mapOf(\"header1\" to \"value1\", \"header2\" to \"value2\")\n                    )\n                )\n            ).first, 200\n        )\n\n        val eventId = \"id234\"\n        responses[eventId] = SynchronousQueue()\n        val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(eventId, nowUTC().minusMinutes(1).toString(), tenant, payload = \"testP\", deliveryOption = PAYLOAD_ONLY)))\n        assertEquals(status, 200)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, TRIGGERED)\n        val payload = payloadResponse.take()\n        assertEquals(payload, \"testP\")\n    }\n\n    @Test\n    fun `test event - null payload with payload only option`() {\n        val tenant = \"http2\"\n        assertEquals(\n            client.call(\n                \"/events/tenant/register\", ProcessorConfig(\n                    tenant, HTTP, mapOf(\n                        \"url\" to \"http://localhost:9090/test\", \"headers\" to mapOf(\"header1\" to \"value1\", \"header2\" to \"value2\")\n                    )\n                )\n            ).first, 200\n        )\n        val eventId = randomUUID().toString()\n        responses[eventId] = SynchronousQueue()\n    
    val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(eventId, nowUTC().plusMinutes(1).toString(), tenant, deliveryOption = PAYLOAD_ONLY)))\n        assertEquals(status, 400)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, REJECTED)\n    }\n\n    @Test\n    fun `test find and dryrun APIs`() {\n        val tenant = \"http4\"\n        assertEquals(\n            client.call(\n                \"/events/tenant/register\", ProcessorConfig(\n                    tenant, HTTP, mapOf(\n                        \"url\" to \"http://localhost:9090/test\", \"headers\" to mapOf(\"header1\" to \"value1\", \"header2\" to \"value2\")\n                    )\n                )\n            ).first, 200\n        )\n        val eventId = randomUUID().toString()\n        responses[eventId] = SynchronousQueue()\n        val eventTime = nowUTC().plusMinutes(100000).toString()\n        val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(eventId, eventTime, tenant, payload = \"P1\")))\n        assertEquals(status, 200)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, ACCEPTED)\n\n        val (s, c) = client.call(\"/events/find?tenant=$tenant&id=$eventId\", null, Get)\n        assertEquals(s, 200)\n        val er = EventResponse::class.java.fromJson(c)\n        assertEquals(er.eventStatus, UN_PROCESSED)\n        assertEquals(er.payload, \"P1\")\n        assertEquals(er.tenant, tenant)\n        assertFalse { er.eventId!!.startsWith(\"a-\") }\n        assertEquals(er.deliveryOption, FULL_EVENT)\n        assertEquals(er.eventTime, eventTime)\n\n        val (s1, c1) = client.call(\"/events/dryrun?tenant=$tenant&id=$eventId\", null)\n        assertEquals(s1, 200)\n        val er1 = EventResponse::class.java.fromJson(c1)\n        assertEquals(er1.eventStatus, UN_PROCESSED)\n        assertEquals(er1.payload, \"P1\")\n        assertEquals(er1.tenant, tenant)\n        assertFalse { 
er1.eventId!!.startsWith(\"a-\") }\n        assertEquals(er1.deliveryOption, FULL_EVENT)\n        assertEquals(er1.eventTime, eventTime)\n\n        responses[eventId]!!.take().apply {\n            assertEquals(eventStatus, TRIGGERED)\n            assertEquals(payload, \"P1\")\n            assertEquals(this.tenant, tenant)\n            assertFalse { this.eventId!!.startsWith(\"a-\") }\n            assertEquals(deliveryOption, FULL_EVENT)\n            assertEquals(eventTime, eventTime)\n        }\n    }\n\n    @Test\n    fun `test find and dryrun APIs - payload only`() {\n        val tenant = \"http5\"\n        assertEquals(\n            client.call(\n                \"/events/tenant/register\", ProcessorConfig(\n                    tenant, HTTP, mapOf(\n                        \"url\" to \"http://localhost:9090/payload\", \"headers\" to mapOf(\"header1\" to \"value1\", \"header2\" to \"value2\")\n                    )\n                )\n            ).first, 200\n        )\n        val eventId = randomUUID().toString()\n        responses[eventId] = SynchronousQueue()\n        val eventTime = nowUTC().plusMinutes(100000).toString()\n        val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(eventId, eventTime, tenant, payload = \"P1\", deliveryOption = PAYLOAD_ONLY)))\n        assertEquals(status, 200)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, ACCEPTED)\n\n        val (s, c) = client.call(\"/events/find?tenant=$tenant&id=$eventId\", null, Get)\n        assertEquals(s, 200)\n        val er = EventResponse::class.java.fromJson(c)\n        assertEquals(er.eventStatus, UN_PROCESSED)\n        assertEquals(er.payload, \"P1\")\n        assertEquals(er.tenant, tenant)\n        assertTrue { er.eventId!!.startsWith(\"a-\") }\n        assertEquals(er.deliveryOption, PAYLOAD_ONLY)\n        assertEquals(er.eventTime, eventTime)\n\n        val (s1, c1) = client.call(\"/events/dryrun?tenant=$tenant&id=$eventId\", 
null)\n        assertEquals(s1, 200)\n        val er1 = EventResponse::class.java.fromJson(c1)\n        assertEquals(er1.eventStatus, UN_PROCESSED)\n        assertEquals(er1.payload, \"P1\")\n        assertEquals(er1.tenant, tenant)\n        assertTrue { er1.eventId!!.startsWith(\"a-\") }\n        assertEquals(er1.deliveryOption, PAYLOAD_ONLY)\n        assertEquals(er1.eventTime, eventTime)\n\n        payloadResponse.take().apply {\n            assertEquals(this, \"P1\")\n        }\n    }\n\n    @Test\n    fun `test event kafka tenant API`() {\n        val tenant = \"kafka1\"\n        assertEquals(\n            client.call(\n                \"/events/tenant/register\", ProcessorConfig(\n                    tenant, MESSAGING, mapOf(\n                        \"topic\" to \"topic1\",\n                        \"bootstrap.servers\" to \"localhost:9092\"\n                    )\n                )\n            ).first, 200\n        )\n        val eventId = randomUUID().toString()\n        responses[eventId] = SynchronousQueue()\n        val eventTime = nowUTC().minusMinutes(1).toString()\n        val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(eventId, eventTime, tenant, payload = \"P1\")))\n        assertEquals(status, 200)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, TRIGGERED)\n        Thread.sleep(2000)\n        val er = MockMessageProducerFactory.LAST_MESSAGE.get()\n        assertEquals(er.eventStatus, TRIGGERED)\n        assertEquals(er.payload, \"P1\")\n        assertEquals(er.tenant, tenant)\n        assertEquals(er.deliveryOption, FULL_EVENT)\n        assertEquals(er.eventTime, eventTime)\n    }\n\n    @Test\n    fun `test event kafka tenant API - payload only`() {\n        val tenant = \"kafka2\"\n        assertEquals(\n            client.call(\n                \"/events/tenant/register\", ProcessorConfig(\n                    tenant, MESSAGING, mapOf(\n                        \"topic\" to 
\"topic2\",\n                        \"bootstrap.servers\" to \"localhost:9092\"\n                    )\n                )\n            ).first, 200\n        )\n        val eventId = randomUUID().toString()\n        responses[eventId] = SynchronousQueue()\n        val eventTime = nowUTC().minusMinutes(1).toString()\n        val (status, content) = client.call(\"/events/schedule\", listOf(EventRequest(eventId, eventTime, tenant, payload = \"P2\", deliveryOption = PAYLOAD_ONLY)))\n        assertEquals(status, 200)\n        assertEquals(typeRefJson<List<EventResponse>>(content)[0].eventStatus, TRIGGERED)\n        val er = MockMessageProducerFactory.LAST_MESSAGE.get()\n        assertEquals(er.eventStatus, TRIGGERED)\n        assertEquals(er.payload, \"P2\")\n        assertEquals(er.tenant, tenant)\n        assertEquals(er.deliveryOption, PAYLOAD_ONLY)\n        assertEquals(er.eventTime, eventTime)\n    }\n\n    private fun HttpClient.call(url: String, body: Any?, method: HttpMethod = Post): Pair<Int, String> {\n        return runBlocking {\n            client.call {\n                url(\"$server$url\")\n                accept(Json)\n                this.method = method\n                body?.let { this.body = TextContent(it.json(), Json) }\n            }.response.run { status.value to this.readText().apply { println(\"response: $this\") } }\n        }\n    }\n}\n\n"
  },
  {
    "path": "app/src/test/kotlin/com/walmartlabs/bigben/tests/BigBenTests.kt",
    "content": "/*-\n * #%L\n * BigBen:app\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.tests\n\nimport com.datastax.driver.core.Session\nimport com.google.common.util.concurrent.ListeningScheduledExecutorService\nimport com.sun.net.httpserver.HttpServer\nimport com.walmartlabs.bigben.BigBen\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.api.EventService\nimport com.walmartlabs.bigben.core.BucketManager\nimport com.walmartlabs.bigben.core.BucketsLoader\nimport com.walmartlabs.bigben.core.ScheduleScanner\nimport com.walmartlabs.bigben.entities.*\nimport com.walmartlabs.bigben.entities.EventStatus.*\nimport com.walmartlabs.bigben.entities.Mode.REMOVE\nimport com.walmartlabs.bigben.extns.bucket\nimport com.walmartlabs.bigben.extns.fetch\nimport com.walmartlabs.bigben.extns.nowUTC\nimport com.walmartlabs.bigben.extns.save\nimport com.walmartlabs.bigben.processors.NoOpCustomClassProcessor\nimport com.walmartlabs.bigben.processors.ProcessorConfig\nimport com.walmartlabs.bigben.processors.ProcessorConfig.Type.*\nimport com.walmartlabs.bigben.utils.commons.Props.int\nimport com.walmartlabs.bigben.utils.commons.Props.map\nimport com.walmartlabs.bigben.utils.commons.TaskExecutor\nimport com.walmartlabs.bigben.utils.fromJson\nimport 
com.walmartlabs.bigben.utils.json\nimport org.apache.commons.text.RandomStringGenerator\nimport org.apache.kafka.clients.consumer.KafkaConsumer\nimport org.apache.kafka.clients.producer.KafkaProducer\nimport org.apache.kafka.clients.producer.ProducerRecord\nimport org.testng.annotations.BeforeClass\nimport org.testng.annotations.BeforeMethod\nimport org.testng.annotations.Test\nimport java.lang.Thread.sleep\nimport java.net.InetSocketAddress\nimport java.time.ZonedDateTime\nimport java.util.*\nimport java.util.concurrent.CountDownLatch\nimport java.util.concurrent.ScheduledThreadPoolExecutor\nimport java.util.concurrent.TimeUnit.MINUTES\nimport java.util.concurrent.atomic.AtomicInteger\nimport kotlin.reflect.full.memberProperties\nimport kotlin.reflect.jvm.isAccessible\nimport kotlin.test.assertEquals\nimport kotlin.test.assertTrue\n\n\n/**\n * Created by smalik3 on 4/11/18\n */\nclass BigBenTests {\n\n    companion object {\n        init {\n            System.setProperty(\"bigben.configs\", \"file://bigben-test.yaml,file://bigben.yaml\")\n            BigBen.init()\n        }\n\n        private val eventService = BigBen.module<EventService>()\n    }\n\n    @BeforeClass\n    fun `set up tenant`() {\n        `clean up db`()\n        eventService.registerProcessor(\n            ProcessorConfig(\n                \"default\", CUSTOM_CLASS,\n                mapOf(\"eventProcessorClass\" to NoOpCustomClassProcessor::class.java.name)\n            )\n        ).apply { assertEquals(this.status, 200) }\n        println(\"tenant set up done\")\n    }\n\n    @BeforeMethod\n    private fun `clean up db`() {\n        println(\"cleaning up the db\")\n        (module<EntityProvider<Any>>().unwrap() as Session).apply {\n            execute(\"truncate bigben.events;\")\n            execute(\"truncate bigben.lookups;\")\n            execute(\"truncate bigben.buckets;\")\n            execute(\"truncate bigben.kv_table;\")\n        }\n    }\n\n    @Test\n    fun `event service schedule 
and find API`() {\n        val eventTime = nowUTC().plusMinutes(3)\n        val tenant = \"default\"\n        val xrefId = \"abc\"\n\n        //add:\n        eventService.schedule(listOf(EventRequest(xrefId, eventTime.toString(), tenant, \"P\"))).apply {\n            assertEquals(status, 200)\n        }\n        eventService.find(xrefId, tenant).apply {\n            assertEquals(status, 200)\n            (entity as EventResponse).apply {\n                assertEquals(ZonedDateTime.parse(this.eventTime), eventTime)\n                assertEquals(payload, \"P\")\n            }\n        }\n\n        //update payload:\n        eventService.schedule(listOf(EventRequest(xrefId, eventTime.toString(), tenant, \"P1\"))).apply {\n            assertEquals(status, 200)\n        }\n        eventService.find(xrefId, tenant).apply {\n            assertEquals(status, 200)\n            (entity as EventResponse).apply {\n                assertEquals(ZonedDateTime.parse(this.eventTime), eventTime)\n                assertEquals(payload, \"P1\")\n            }\n        }\n\n        // update time:\n        eventService.schedule(listOf(EventRequest(xrefId, eventTime.plusMinutes(1).toString(), tenant, \"P2\"))).apply {\n            assertEquals(status, 200)\n        }\n        eventService.find(xrefId, tenant).apply {\n            assertEquals(status, 200)\n            (entity as EventResponse).apply {\n                assertEquals(ZonedDateTime.parse(this.eventTime), eventTime.plusMinutes(1))\n                assertEquals(payload, \"P2\")\n            }\n        }\n\n        //remove event:\n        eventService.schedule(listOf(EventRequest(xrefId, eventTime.plusMinutes(1).toString(), tenant, \"P2\", REMOVE)))\n            .apply {\n                assertEquals(status, 200)\n            }\n        eventService.find(xrefId, tenant).apply {\n            assertEquals(status, 404)\n        }\n    }\n\n    @Test\n    fun `sharding works as expected`() {\n        val r = Random()\n        val 
time = nowUTC().plusMinutes(2).bucket()\n        (0..100).forEach {\n            eventService.schedule(listOf(EventRequest(\"id_$it\", time.plusSeconds(r.nextInt(60).toLong()).toString(), \"default\", \"Payload_$it\")))\n        }\n        (0..100).forEach { i ->\n            eventService.find(\"id_$i\", \"default\").apply {\n                assertEquals(status, 200)\n                fetch<EventLookup> { it.xrefId = \"id_$i\"; it.tenant = \"default\" }.get()!!.apply {\n                    assertEquals(shard, i / int(\"events.receiver.shard.size\"))\n                    fetch<Event> {\n                        it.bucketId = time; it.shard = shard; it.eventTime = eventTime; it.id = eventId\n                    }.get()!!.apply {\n                        assertEquals(status, UN_PROCESSED)\n                    }\n                }\n            }\n        }\n    }\n\n    @Test\n    fun `test bucket loader`() {\n        val bucketId = nowUTC().bucket()\n        val toBeLoaded = (1..10).map { bucketId.minusMinutes(it.toLong()) }.toSet()\n\n        save<Bucket> { it.bucketId = bucketId.minusMinutes(3); it.count = 100; it.status = PROCESSED }.get()!!\n\n        val latch = CountDownLatch(10)\n        val now = System.currentTimeMillis()\n        BucketsLoader(10, 5, 60, bucketId) {\n            try {\n                assertTrue { toBeLoaded.contains(it.bucketId) }\n                if (it.bucketId == bucketId.minusMinutes(3)) {\n                    assertEquals(it.status, PROCESSED)\n                    assertEquals(it.count, 100)\n                } else {\n                    assertEquals(it.status, EMPTY)\n                }\n                latch.countDown()\n            } catch (e: Throwable) {\n                e.printStackTrace()\n            }\n        }.run()\n        if (!latch.await(1, MINUTES)) throw IllegalStateException(\"buckets registry did not complete on time\")\n        assertTrue { System.currentTimeMillis() - now > 1 }\n    }\n\n    @Test\n    fun `test 
bucket manager`() {\n        val time = nowUTC().bucket()\n        println(\"time : $time\")\n        val range = 0..9\n        val buckets = range.map { time.minusMinutes(it.toLong()) }.toSortedSet()\n        println(\"buckets: $buckets\")\n        val shards = range.toList()\n        // test back ground load\n        range.forEach { i ->\n            save<Bucket> {\n                it.bucketId = time.minusMinutes(i.toLong()); it.count = 100L; it.status = UN_PROCESSED\n            }.get()!!\n        }\n        val bm = BucketManager(10, 2 * 60, 60, module())\n        bm.getProcessableShardsForOrBefore(time).get()!!\n\n        sleep(2000)\n\n        bm.getProcessableShardsForOrBefore(time).get()!!.apply {\n            assertEquals(this.keySet().toSortedSet(), buckets.toMutableSet().apply { add(time) }.toSortedSet())\n            this.keySet().forEach {\n                assertEquals(this[it].toList(), shards)\n            }\n        }\n        // test purge:\n        (1..5).forEach { i ->\n            save<Bucket> {\n                it.bucketId = time.plusMinutes(i.toLong()); it.count = 100L; it.status = UN_PROCESSED\n            }.get()!!\n        }\n        (1..5).forEach { bm.getProcessableShardsForOrBefore(time.plusMinutes(it.toLong())).get()!! }\n        bm.purgeIfNeeded()\n        (1..5).forEach {\n            val b = bm.getProcessableShardsForOrBefore(time.plusMinutes(1)).get()!!\n            assertEquals(b.keySet().size, 10)\n            assertEquals(buckets - buckets.take(5) + (1..5).map { time.plusMinutes(it.toLong()) }.toSortedSet(), b.keySet().toSortedSet())\n        }\n    }\n\n    @Test\n    fun `test http processor - ok case - past event`() {\n        var server: HttpServer? 
= null\n        try {\n            val port = 8383\n            eventService.registerProcessor(\n                ProcessorConfig(\n                    \"http\", HTTP,\n                    mapOf\n                        (\n                        \"url\" to \"http://localhost:$port/test\",\n                        \"headers\" to mapOf(\"header\" to \"Header1\")\n                    )\n                )\n            ).apply { assertEquals(this.status, 200) }\n            val eReq = EventRequest(\"id123\", nowUTC().minusSeconds(1).toString(), \"http\", \"Payload1\")\n            println(\"event request: $eReq\")\n            server = HttpServer.create(InetSocketAddress(port), 0)\n            val latch = CountDownLatch(1)\n            server.createContext(\"/test\") {\n                try {\n                    val eResp = EventResponse::class.java.fromJson(String(it.requestBody.readBytes()))\n                    println(\"event response: $eResp\")\n                    assertEquals(it.requestHeaders.getFirst(\"header\"), \"Header1\")\n                    assertEquals(eReq.id, eResp.id)\n                    assertEquals(eReq.eventTime, eResp.eventTime)\n                    assertEquals(eReq.payload, eResp.payload)\n                    assertEquals(eReq.tenant, eResp.tenant)\n                    assertEquals(eReq.mode, eResp.mode)\n                    assertTrue(eResp.eventId == null)\n                    assertTrue(eResp.eventStatus == TRIGGERED)\n                    mapOf(\"status\" to \"OK\").json().apply {\n                        it.sendResponseHeaders(200, length.toLong())\n                        it.responseBody.write(toByteArray())\n                    }\n                } catch (e: Throwable) {\n                    e.printStackTrace()\n                    mapOf(\"status\" to \"error\").json().apply {\n                        it.sendResponseHeaders(500, length.toLong())\n                        it.responseBody.write(toByteArray())\n                    }\n         
           throw AssertionError(\"test failed\")\n                } finally {\n                    it.close()\n                }\n                latch.countDown()\n            }\n            server.start()\n            eventService.schedule(listOf(eReq)).apply { assertEquals(200, status) }\n            if (!latch.await(1, MINUTES))\n                throw AssertionError(\"latch not down\")\n        } finally {\n            server?.run { stop(0) }\n        }\n    }\n\n    @Test\n    fun `test http processor - ok case - future event`() {\n        var server: HttpServer? = null\n        try {\n            val port = 8383\n            eventService.registerProcessor(\n                ProcessorConfig(\n                    \"http\", HTTP,\n                    mapOf\n                        (\n                        \"url\" to \"http://localhost:$port/test\",\n                        \"headers\" to mapOf(\"header\" to \"Header1\")\n                    )\n                )\n            ).apply { assertEquals(this.status, 200) }\n            val time = nowUTC().plusMinutes(1).withSecond(10).withNano(0)\n            val eReq = EventRequest(\"id123\", time.toString(), \"http\", \"Payload1\")\n            println(\"event request: $eReq\")\n            server = HttpServer.create(InetSocketAddress(port), 0)\n            val latch = CountDownLatch(1)\n            server.createContext(\"/test\") {\n                try {\n                    val eResp = EventResponse::class.java.fromJson(String(it.requestBody.readBytes()))\n                    println(\"event response: $eResp\")\n                    assertEquals(it.requestHeaders.getFirst(\"header\"), \"Header1\")\n                    assertEquals(eReq.id, eResp.id)\n                    assertEquals(eReq.eventTime, eResp.eventTime)\n                    assertEquals(eReq.payload, eResp.payload)\n                    assertEquals(\"Payload2\", eResp.payload)\n                    assertEquals(eReq.tenant, eResp.tenant)\n                
    assertEquals(eReq.mode, eResp.mode)\n                    assertTrue(eResp.eventId != null)\n                    assertTrue(eResp.eventStatus == TRIGGERED)\n                    mapOf(\"status\" to \"OK\").json().apply {\n                        it.sendResponseHeaders(200, length.toLong())\n                        it.responseBody.write(toByteArray())\n                    }\n                } catch (e: Throwable) {\n                    e.printStackTrace()\n                    mapOf(\"status\" to \"error\").json().apply {\n                        it.sendResponseHeaders(500, length.toLong())\n                        it.responseBody.write(toByteArray())\n                    }\n                    throw AssertionError(\"test failed\")\n                } finally {\n                    it.close()\n                }\n                latch.countDown()\n            }\n            server.start()\n            eventService.schedule(listOf(eReq)).apply { assertEquals(200, status) }\n            eventService.schedule(listOf(eReq.apply { payload = \"Payload2\" })).apply { assertEquals(200, status) }\n            val bm = BucketManager(1, 2 * 60, 60, module())\n            println(\"manually scheduling $time\")\n            ScheduleScanner(module()).scan(time.withSecond(0).withNano(0), bm)\n            if (!latch.await(2, MINUTES))\n                throw AssertionError(\"latch not down\")\n        } finally {\n            server?.run { stop(0) }\n        }\n    }\n\n    @Test\n    fun `test http processor - error case`() {\n        var server: HttpServer? 
= null\n        try {\n            val port = 8383\n            eventService.registerProcessor(\n                ProcessorConfig(\n                    \"http\", HTTP,\n                    mapOf\n                        (\n                        \"url\" to \"http://localhost:$port/test\",\n                        \"header\" to \"Header1\"\n                    )\n                )\n            ).apply { assertEquals(this.status, 200) }\n            val eReq = EventRequest(\"id123\", nowUTC().minusSeconds(1).toString(), \"http\", \"Payload1\")\n            server = HttpServer.create(InetSocketAddress(port), 0)\n            val latch = CountDownLatch(1)\n            val tries = AtomicInteger()\n            server.createContext(\"/test\") {\n                try {\n                    tries.incrementAndGet()\n                    mapOf(\"status\" to \"error\").json().apply {\n                        it.sendResponseHeaders(500, length.toLong())\n                        it.responseBody.write(toByteArray())\n                    }\n                } finally {\n                    it.close()\n                }\n                latch.countDown()\n            }\n            server.start()\n            eventService.schedule(listOf(eReq)).apply { assertEquals(200, status) }\n            if (!latch.await(1, MINUTES))\n                throw AssertionError(\"latch not down\")\n            var passed = false\n            loop@ for (i in (1..10)) {\n                if (tries.get() != 4)\n                    sleep(1000)\n                else {\n                    passed = true; break@loop\n                }\n            }\n            assertTrue(passed)\n        } finally {\n            server?.run { stop(0) }\n        }\n    }\n\n    @Test\n    fun `test http processor - bad request case`() {\n        var server: HttpServer? 
= null\n        try {\n            val port = 8383\n            eventService.registerProcessor(\n                ProcessorConfig(\n                    \"http\", HTTP,\n                    mapOf\n                        (\n                        \"url\" to \"http://localhost:$port/test\",\n                        \"header\" to \"Header1\"\n                    )\n                )\n            ).apply { assertEquals(this.status, 200) }\n            val eReq = EventRequest(\"id123\", nowUTC().minusSeconds(1).toString(), \"http\", \"Payload1\")\n            server = HttpServer.create(InetSocketAddress(port), 0)\n            val latch = CountDownLatch(1)\n            val tries = AtomicInteger()\n            server.createContext(\"/test\") {\n                try {\n                    tries.incrementAndGet()\n                    mapOf(\"status\" to \"error\").json().apply {\n                        it.sendResponseHeaders(400, length.toLong())\n                        it.responseBody.write(toByteArray())\n                    }\n                } finally {\n                    it.close()\n                }\n                latch.countDown()\n            }\n            server.start()\n            eventService.schedule(listOf(eReq)).apply { assertEquals(200, status) }\n            if (!latch.await(1, MINUTES))\n                throw AssertionError(\"latch not down\")\n            var passed = false\n            loop@ for (i in (1..5)) {\n                if (tries.get() != 1)\n                    sleep(1000)\n                else {\n                    passed = true; break@loop\n                }\n            }\n            sleep(2000)\n            assertTrue(passed)\n        } finally {\n            server?.run { stop(0) }\n        }\n    }\n\n    @Test\n    fun `test kafka integration - ok case`() {\n        eventService.registerProcessor(\n            ProcessorConfig(\n                \"kafka\", MESSAGING,\n                mapOf\n                    (\n                    
\"topic\" to \"test\",\n                    \"brokers.url\" to \"\"\n                )\n            )\n        ).apply { assertEquals(this.status, 200) }\n        val eReq = EventRequest(\"id123\", nowUTC().minusSeconds(1).toString(), \"kafka\", \"Payload1\")\n        eventService.schedule(listOf(eReq)).apply { assertEquals(200, status) }\n    }\n\n    @Test\n    fun `test kafka integration - error case`() {\n        val x: ScheduledThreadPoolExecutor = (TaskExecutor.Companion::class.memberProperties\n            .filter { it.name == \"RETRY_POOL\" }[0]\n            .apply { isAccessible = true }.get(TaskExecutor.Companion) as ListeningScheduledExecutorService)\n            .let {\n                it::class.java.getDeclaredField(\"delegate\").apply { isAccessible = true }.get(it)\n            } as ScheduledThreadPoolExecutor\n        val current = x.completedTaskCount\n        eventService.registerProcessor(\n            ProcessorConfig(\n                \"kafka\", MESSAGING,\n                mapOf\n                    (\n                    \"topic\" to \"test\",\n                    \"brokers.url\" to \"\",\n                    \"fail\" to true\n                )\n            )\n        ).apply { assertEquals(this.status, 200) }\n        val eReq = EventRequest(\"id123\", nowUTC().minusSeconds(1).toString(), \"kafka\", \"Payload1\")\n        eventService.schedule(listOf(eReq)).apply { assertEquals(200, status) }\n        sleep(10000)\n        assertTrue(x.completedTaskCount - current >= 3.toLong())\n    }\n\n    @Test(enabled = false)\n    fun `test kafka consumer`() {\n        eventService.registerProcessor(\n            ProcessorConfig(\n                \"kafka\", MESSAGING,\n                mapOf\n                    (\n                    \"topic\" to \"outbound\",\n                    \"brokers.url\" to \"localhost:9092\"\n                )\n            )\n        ).apply { assertEquals(this.status, 200) }\n        /*val consumer = 
(BigBen.messageProcessors[0] as MockKafkaProcessor).consumer\n        consumer.rebalance(setOf(TopicPartition(\"inbound\", 1)))\n        consumer.updateBeginningOffsets(mapOf(TopicPartition(\"inbound\", 0) to 1.toLong()))\n        consumer.updateBeginningOffsets(mapOf(TopicPartition(\"inbound\", 0) to Long.MAX_VALUE))*/\n        val eReq = EventRequest(\"id123\", nowUTC().minusSeconds(1).toString(), \"kafka\", \"Payload1\")\n        println(eReq.json())\n        //sleep(Long.MAX_VALUE)\n    }\n\n    @Test(enabled = false)\n    fun `end to end kafka`() {\n        eventService.registerProcessor(\n            ProcessorConfig(\n                \"kafka\", MESSAGING,\n                mapOf\n                    (\n                    \"topic\" to \"outbound\",\n                    \"bootstrap.servers\" to \"localhost:9092\"\n                )\n            )\n        ).apply { assertEquals(this.status, 200) }\n\n\n        val producer =\n            KafkaProducer<String, String>(map(\"kafka.producer.config\").mapKeys { it.key.removePrefix(\"kafka.producer.config.\") } +\n                                                  mapOf\n                                                      (\n                                                      \"topic\" to \"outbound\",\n                                                      \"bootstrap.servers\" to \"localhost:9092\"\n                                                  ))\n        (1..100).forEach {\n            println(\"sending $it\")\n            val eReq =\n                EventRequest(\"id123\", nowUTC().minusSeconds(1).toString(), \"kafka\", RandomStringGenerator.Builder().build().generate(1024))\n            producer.send(ProducerRecord(\"outbound\", eReq.json())).get()\n        }\n        sleep(3000)\n    }\n\n    @Test(enabled = false)\n    fun `test consumer`() {\n        val consumer =\n            KafkaConsumer<String, String>(map(\"kafka.consumer.config\") + mapOf(\"group.id\" to UUID.randomUUID().toString()))\n        
consumer.subscribe(setOf(\"outbound\"))\n        while (true) {\n            println(\"polling outbound\")\n            val records = consumer.poll(3000)\n            println(records.count())\n            consumer.commitSync()\n        }\n    }\n}\n"
  },
  {
    "path": "app/src/test/kotlin/com/walmartlabs/bigben/tests/KafkaTests.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.tests\n\nimport com.datastax.driver.core.Session\nimport com.google.common.util.concurrent.Futures\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.BigBen\nimport com.walmartlabs.bigben.BigBen.entityProvider\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.api.EventService\nimport com.walmartlabs.bigben.entities.EntityProvider\nimport com.walmartlabs.bigben.entities.Event\nimport com.walmartlabs.bigben.entities.EventRequest\nimport com.walmartlabs.bigben.entities.EventResponse\nimport com.walmartlabs.bigben.kafka.KafkaMessageProcessor\nimport com.walmartlabs.bigben.kafka.ProcessorImpl\nimport com.walmartlabs.bigben.processors.NoOpCustomClassProcessor\nimport com.walmartlabs.bigben.processors.ProcessorConfig\nimport com.walmartlabs.bigben.processors.ProcessorConfig.Type.CUSTOM_CLASS\nimport com.walmartlabs.bigben.processors.ProcessorConfig.Type.MESSAGING\nimport com.walmartlabs.bigben.processors.ProcessorRegistry\nimport com.walmartlabs.bigben.utils.commons.PropsLoader\nimport com.walmartlabs.bigben.utils.fromJson\nimport com.walmartlabs.bigben.utils.json\nimport org.apache.kafka.clients.consumer.ConsumerRecord\nimport 
org.testng.annotations.BeforeMethod\nimport org.testng.annotations.Test\nimport java.time.ZonedDateTime\nimport java.util.concurrent.CountDownLatch\nimport java.util.concurrent.TimeUnit\nimport java.util.concurrent.atomic.AtomicInteger\n\n/**\n * Created by smalik3 on 9/19/18\n */\nclass KafkaTests {\n\n    companion object {\n        val latch = CountDownLatch(1)\n\n        init {\n            System.setProperty(\"bigben.props\", \"file://bigben-kafka-test.yaml\")\n            BigBen.init()\n        }\n    }\n\n    @BeforeMethod\n    private fun `clean up db`() {\n        println(\"cleaning up the db\")\n        try {\n            (BigBen.module<EntityProvider<Any>>().unwrap() as Session).apply {\n                execute(\"truncate bigben.events;\")\n                execute(\"truncate bigben.lookups;\")\n                execute(\"truncate bigben.buckets;\")\n                execute(\"truncate bigben.kv_table;\")\n            }\n        } catch (e: Throwable) {\n            e.printStackTrace()\n            throw e\n        }\n        println(\"setting up tenant\")\n        module<EventService>().registerProcessor(ProcessorConfig(\"tenant1\", CUSTOM_CLASS,\n                mapOf(\"eventProcessorClass\" to NoOpCustomClassProcessor::class.java.name)))\n    }\n\n    @Test(enabled = false)\n    fun `test kafka integration`() {\n        println(\"in kafka tests\")\n\n        module<EventService>().registerProcessor(ProcessorConfig(\"tenant2\", MESSAGING,\n                mapOf(\n                        \"topic\" to \"topic1\",\n                        \"bootstrap.servers\" to \"localhost:9092\"\n                )))\n        (1..1).forEach {\n            entityProvider<Event>().let { it.raw(it.selector(Event::class.java)) }.apply {\n                id = \"id_$it\"\n                eventTime = ZonedDateTime.parse(\"2018-09-19T20:42Z\")\n                payload = \"Payload_$it\"\n                tenant = \"tenant2\"\n                xrefId = \"xref_$id\"\n            
}.apply { module<ProcessorRegistry>().invoke(this).get() }\n        }\n        if (!latch.await(2, TimeUnit.MINUTES))\n            throw AssertionError(\"test failed\")\n    }\n}\n\nclass MockProcessorImpl(props: PropsLoader) : KafkaMessageProcessor(props) {\n    private val impl = ProcessorImpl(props)\n    private val counter = AtomicInteger(0)\n\n    init {\n        println(\"---> starting the kafka consumer\")\n        println(EventRequest(\"id123\", \"2018-09-19T20:42Z\", \"tenant1\", \"Payload1\").json())\n    }\n\n    override fun process(cr: ConsumerRecord<String, String>): ListenableFuture<Any> {\n        println(\"got a new record: ${cr.value()}\")\n        val er = EventResponse::class.java.fromJson(cr.value())\n        val cr2 = ConsumerRecord(cr.topic(), cr.partition(), cr.offset(), cr.key(), er.run {\n            EventRequest(id, eventTime, tenant, payload).json()\n        })\n        impl.process(cr2).get()\n        counter.incrementAndGet()\n        if (counter.get() == 10) KafkaTests.latch.countDown()\n        return Futures.immediateFuture(cr)\n    }\n}"
  },
  {
    "path": "app/src/test/resources/bigben-api-test.yaml",
    "content": "# top level modules\nmodules:\n  - name: domain\n    class: com.walmartlabs.bigben.providers.domain.cassandra.CassandraModule\n  - name: processors\n    object: com.walmartlabs.bigben.processors.ProcessorRegistry\n  - name: hz\n    class: com.walmartlabs.bigben.utils.hz.Hz\n  - name: scheduler\n    object: com.walmartlabs.bigben.SchedulerModule\n  - name: events\n    object: com.walmartlabs.bigben.EventModule\n  - name: messaging\n    object: com.walmartlabs.bigben.kafka.KafkaModule\n    enabled: false\n  - name: cron\n    object: com.walmartlabs.bigben.cron.CronRunner\n    enabled: false\n\n# hazelcast properties\nhz:\n  template: file://hz.template.xml\n  group:\n    name: bigben-dev\n    password: bigben-dev\n  network:\n    autoIncrementPort: true\n    members: 127.0.0.1\n    port: 5701\n  map:\n    store:\n      writeDelay: 30\n\n# cassandra related properties\ncassandra:\n  keyspace: bigben\n  cluster:\n    contactPoints: 127.0.0.1\n    clusterName: bigben-cluster\n    port: 9042\n    localDataCenter: null\n    coreConnectionsPerHost: 8\n    maxHostsPerConnection: 32768\n    keepTCPConnectionAlive: true\n    connectionTimeOut: 5000\n    readTimeout: 12000\n    reconnectPeriod: 5\n    username: null\n    password: null\n    downgradingConsistency: false\n    writeConsistency: \"LOCAL_QUORUM\"\n    readConsistency: \"LOCAL_QUORUM\"\n\n# kafka related properties\nkafka:\n  consumers:\n    - num.consumers: 8\n      processor.class: com.walmartlabs.bigben.kafka.ProcessorImpl\n      topics: null\n      max.poll.wait.time: 10000\n      message.retry.max.count: 10\n      unknown.exception.retries: 3\n      config:\n        key.deserializer: org.apache.kafka.common.serialization.StringDeserializer\n        value.deserializer: org.apache.kafka.common.serialization.StringDeserializer\n        bootstrap.servers: null\n        #fetch.min.bytes: 1\n        group.id: bigben-inbound\n        #heartbeat.interval.ms: 3000\n        session.timeout.ms: 30000\n    
    auto.offset.reset: earliest\n        fetch.max.bytes: 324000\n        max.poll.interval.ms: 30000\n        max.poll.records: 100\n        receive.buffer.bytes: 65536\n        request.timeout.ms: 60000\n        #send.buffer.bytes: 131072\n        enable.auto.commit: false\n  producer:\n    config: # this is default kafka producer config, these values will be used if not supplied during the tenant registration\n      key.serializer: org.apache.kafka.common.serialization.StringSerializer\n      value.serializer: org.apache.kafka.common.serialization.StringSerializer\n      acks: \"1\"\n      buffer.memory: 32400\n      retries: 3\n\n# system properties\ntask:\n  executor:\n    #retry.thread.count: 8\n    retry.time.units: SECONDS\n    delay: 1\n    max.retries: 3\n    backoff.multiplier: 2\n\n# scheduler / event related properties\nevents:\n  scheduler.enabled: true\n  schedule.scan.interval.minutes: 1\n  num.shard.submitters: 8\n  receiver:\n    shard.size: 1000\n    lapse.offset.minutes: 0\n    delete:\n      max.retries: 3\n      initial.delay: 1\n      backoff.multiplier: 1\n  submit:\n    initial.delay: 1\n    backoff.multiplier: 1\n    max.retries: 3\n  processor:\n    max.retries: 3\n    initial.delay: 1\n    backoff.multiplier: 2\n    eager.loading: true\n  tasks:\n    max.events.in.memory: 100000\n    scheduler.worker.threads: 8\n\n# bucket manager / loader related properties\nbuckets:\n  backlog.check.limit: 300\n  background:\n    load.fetch.size: 100\n    load.wait.interval.seconds: 15\n\n# cron related properties\ncron:\n  runner:\n    core.pool.size: 8\n  load:\n    max.retries: 10\n    delay: 1\n    backoff.multiplier: 1\n    time.units: \"SECONDS\"\n\nmessaging.producer.factory.class: com.walmartlabs.bigben.kafka.MockMessageProducerFactory\ngeneric.future.max.get.time: 60"
  },
  {
    "path": "app/src/test/resources/bigben-kafka-test.yaml",
    "content": "# top level modules\nmodules:\n  - name: domain\n    class: com.walmartlabs.bigben.providers.domain.cassandra.CassandraModule\n  - name: processors\n    object: com.walmartlabs.bigben.processors.ProcessorRegistry\n  - name: hz\n    class: com.walmartlabs.bigben.utils.hz.Hz\n  - name: scheduler\n    object: com.walmartlabs.bigben.SchedulerModule\n  - name: events\n    object: com.walmartlabs.bigben.EventModule\n  - name: kafka\n    object: com.walmartlabs.bigben.kafka.KafkaModule\n\n# hazelcast properties\nhz:\n  template: /hz.template.xml\n  group:\n    name: bigben-dev\n    password: bigben-dev\n  network:\n    autoIncrementPort: true\n    members: 127.0.0.1\n    port: 5701\n  map:\n    store:\n      writeDelay: 30\n\n# cassandra related properties\ncassandra:\n  keyspace: bigben\n  cluster:\n    contactPoints: 127.0.0.1\n    clusterName: bigben-cluster\n    port: 9042\n    localDataCenter: null\n    coreConnectionsPerHost: 8\n    maxHostsPerConnection: 32768\n    keepTCPConnectionAlive: true\n    connectionTimeOut: 5000\n    readTimeout: 12000\n    reconnectPeriod: 5\n    username: null\n    password: null\n    downgradingConsistency: false\n    writeConsistency: \"LOCAL_QUORUM\"\n    readConsistency: \"LOCAL_QUORUM\"\n\n# system properties\ntask:\n  executor:\n    #retry.thread.count: 8\n    retry.time.units: SECONDS\n    delay: 1\n    max.retries: 3\n    backoff.multiplier: 2\n\n# kafka related properties\nkafka:\n  consumers:\n    - num.consumers: 1\n      processor.impl.class: com.walmartlabs.bigben.tests.MockProcessorImpl\n      topics: topic1\n      max.poll.wait.time: 10000\n      message.retry.max.count: 10\n      unknown.exception.retries: 3\n      config:\n        key.deserializer: org.apache.kafka.common.serialization.StringDeserializer\n        value.deserializer: org.apache.kafka.common.serialization.StringDeserializer\n        bootstrap.servers: localhost:9092\n        #fetch.min.bytes: 1\n        group.id: bigben-kafka-test\n        
#heartbeat.interval.ms: 3000\n        session.timeout.ms: 30000\n        auto.offset.reset: earliest\n        fetch.max.bytes: 324000\n        max.poll.interval.ms: 30000\n        max.poll.records: 100\n        receive.buffer.bytes: 65536\n        request.timeout.ms: 60000\n        #send.buffer.bytes: 131072\n        enable.auto.commit: false\n  producer:\n    config: # this is default kafka producer config, these values will be used if not supplied during the tenant registration\n      key.serializer: org.apache.kafka.common.serialization.StringSerializer\n      value.serializer: org.apache.kafka.common.serialization.StringSerializer\n      acks: \"1\"\n      buffer.memory: 32400\n      retries: 3\n\nmessaging.producer.factory.class: com.walmartlabs.bigben.kafka.KafkaMessageProducerFactory\ngeneric.future.max.get.time: 60\n\n# scheduler / event related properties\nevents:\n  scheduler.enabled: false\n  schedule.scan.interval.minutes: 1\n  num.shard.submitters: 8\n  receiver:\n    shard.size: 10\n    lapse.offset.minutes: 0\n    delete:\n      max.retries: 3\n      initial.delay: 1\n      backoff.multiplier: 1\n  submit:\n    initial.delay: 1\n    backoff.multiplier: 1\n    max.retries: 3\n  processor:\n    max.retries: 3\n    initial.delay: 1\n    backoff.multiplier: 2\n    eager.loading: true\n  tasks:\n    max.events.in.memory: 100000\n    scheduler.worker.threads: 8\n\n# bucket manager / loader related properties\nbuckets:\n  backlog.check.limit: 1\n  background:\n    load.fetch.size: 10\n    load.wait.interval.seconds: 1\n  checkpoint:\n    interval: 60\n    interval.units: SECONDS"
  },
  {
    "path": "app/src/test/resources/bigben-test.yaml",
    "content": "events:\n  receiver:\n    shard.size: 10\n  processor.eager.loading: false"
  },
  {
    "path": "app/src/test/resources/log4j.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!--\n  #%L\n  BigBen:app\n  =======================================\n  Copyright (C) 2016 - 2018 Walmart Inc.\n  =======================================\n  Licensed under the Apache License, Version 2.0 (the \"License\");\n  you may not use this file except in compliance with the License.\n  You may obtain a copy of the License at\n  \n       http://www.apache.org/licenses/LICENSE-2.0\n  \n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n  #L%\n  -->\n\n<!DOCTYPE log4j:configuration SYSTEM \"log4j.dtd\">\n<log4j:configuration debug=\"true\"\n                     xmlns:log4j='http://jakarta.apache.org/log4j/'\n                     xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n                     xsi:schemaLocation=\"http://jakarta.apache.org/log4j/ \">\n\n    <appender name=\"console\" class=\"org.apache.log4j.ConsoleAppender\">\n        <layout class=\"org.apache.log4j.PatternLayout\">\n            <param name=\"ConversionPattern\"\n                   value=\"%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%t] %c{1}:%L - %m%n\"/>\n        </layout>\n    </appender>\n\n    <logger name=\"com.walmartlabs.bigben\" additivity=\"false\">\n        <level value=\"INFO\"/>\n        <appender-ref ref=\"console\"/>\n    </logger>\n\n</log4j:configuration>\n"
  },
  {
    "path": "build/configs/log4j.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!--\n  #%L\n  BigBen:app\n  =======================================\n  Copyright (C) 2016 - 2018 Walmart Inc.\n  =======================================\n  Licensed under the Apache License, Version 2.0 (the \"License\");\n  you may not use this file except in compliance with the License.\n  You may obtain a copy of the License at\n  \n       http://www.apache.org/licenses/LICENSE-2.0\n  \n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n  #L%\n  -->\n\n<!DOCTYPE log4j:configuration SYSTEM \"log4j.dtd\">\n<log4j:configuration debug=\"true\"\n                     xmlns:log4j='http://jakarta.apache.org/log4j/'\n                     xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n                     xsi:schemaLocation=\"http://jakarta.apache.org/log4j/ \">\n\n    <appender name=\"console\" class=\"org.apache.log4j.ConsoleAppender\">\n        <layout class=\"org.apache.log4j.PatternLayout\">\n            <param name=\"ConversionPattern\" value=\"%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%t] %c{1}:%L - %m%n\"/>\n        </layout>\n    </appender>\n\n    <appender name=\"file\" class=\"org.apache.log4j.RollingFileAppender\">\n        <param name=\"append\" value=\"false\"/>\n        <param name=\"maxFileSize\" value=\"100MB\"/>\n        <param name=\"maxBackupIndex\" value=\"3\"/>\n        <param name=\"file\" value=\"${bigben.log.file}\"/>\n        <layout class=\"org.apache.log4j.PatternLayout\">\n            <param name=\"ConversionPattern\" value=\"%d{yyyy-MM-dd HH:mm:ss} %-5p [%t] %c{1}:%L - %m%n\"/>\n        </layout>\n    </appender>\n\n    <root>\n        <level value=\"INFO\"/>\n        <appender-ref ref=\"console\"/>\n        
<appender-ref ref=\"file\"/>\n    </root>\n\n    <logger name=\"com.walmartlabs.bigben\" additivity=\"false\">\n        <level value=\"INFO\"/>\n        <appender-ref ref=\"console\"/>\n        <appender-ref ref=\"file\"/>\n    </logger>\n\n</log4j:configuration>"
  },
  {
    "path": "build/configs/overrides.yaml",
    "content": "hz:\n  network.members: ${HZ_MEMBER_IPS}\ncassandra.cluster.contactPoints: ${CASSANDRA_SEED_IPS}\nbigben.inbound.topic:\n  name: some_topic\n  bootstrap.servers: some_servers\n#events.scheduler.enabled: false"
  },
  {
    "path": "build/docker/Dockerfile",
    "content": "FROM openjdk:8-jre-alpine\n\nENV APPLICATION_USER bigben\nRUN adduser -D -g '' $APPLICATION_USER\nENV APP_ROOT /dist\nRUN if [ -d \"$APP_ROOT\" ]; then rm -Rf $APP_ROOT; fi\nRUN mkdir $APP_ROOT\nRUN chown -R $APPLICATION_USER $APP_ROOT\nUSER $APPLICATION_USER\nCOPY ./build/bin/bigben.jar $APP_ROOT/bigben.jar\nCOPY ./build/docker/start.sh $APP_ROOT/start.sh\nUSER root\nRUN chmod +x $APP_ROOT/start.sh\nWORKDIR $APP_ROOT\nEXPOSE 8080 5701\n\nCMD [\"sh\", \"-c\", \"$APP_ROOT/start.sh\"]"
  },
  {
    "path": "build/docker/app_run.sh",
    "content": "#!/usr/bin/env bash\nset -e\nAPP_CONTAINER_NAME=${APP_CONTAINER_NAME:-bigben_app}\nSERVER_PORT=${SERVER_PORT:-8080}\nHZ_PORT=5701\nNUM_INSTANCES=${NUM_INSTANCES:-1}\nAPP_ROOT=/dist\nBUILD_DIR=${PWD}/..\nLOGS_DIR=${LOGS_DIR:-${BUILD_DIR}/../../bigben_logs}\n\nHOST_IP=${HOST_IP:-`ifconfig | grep -Eo 'inet (addr:)?([0-9]*\\.){3}[0-9]*' | grep -Eo '([0-9]*\\.){3}[0-9]*' | grep -v '127.0.0.1'`}\nCASSANDRA_SEED_IPS=${CASSANDRA_SEED_IPS:-${HOST_IP}}\nHZ_MEMBER_IPS=${HZ_MEMBER_IPS:-${HOST_IP}}\n\nDEFAULT_JAVA_OPTS=\"-server -XX:+UnlockExperimentalVMOptions -XX:InitialRAMFraction=2 -XX:MinRAMFraction=2 -XX:+UseG1GC -XX:MaxGCPauseMillis=100 -XX:+UseStringDeduplication\"\nJAVA_OPTS=${JAVA_OPTS}\n\nif [[ \"x${JAVA_OPTS}\" != \"x\" ]]; then\n    JAVA_OPTS=\"${DEFAULT_JAVA_OPTS} ${JAVA_OPTS}\"\nelse\n    JAVA_OPTS=\"${DEFAULT_JAVA_OPTS}\"\nfi\n\necho HOST_IP: ${HOST_IP}, SERVER_PORT: ${SERVER_PORT}, \\\nHZ_MEMBER_IPS: ${HZ_MEMBER_IPS}, CASSANDRA_SEED_IPS: ${CASSANDRA_SEED_IPS}, \\\nHZ_PORT: ${HZ_PORT}, NUM_INSTANCES: ${NUM_INSTANCES}\n\nfunction stop() {\n    echo \"stopping app servers, if any\"\n    i=1\n    while [[  ${i} -lt $(($NUM_INSTANCES + 1)) ]]; do\n        app_port=$((${SERVER_PORT} + 101 * $((i - 1))))\n        echo \"stopping ${APP_CONTAINER_NAME}_$app_port\"\n        docker stop \"${APP_CONTAINER_NAME}_$app_port\" || true\n        let i=i+1\n    done\n}\n\nfunction start() {\n    echo \"starting ${NUM_INSTANCES} app node(s)\"\n    i=1\n    while [[  ${i} -lt $(($NUM_INSTANCES + 1)) ]]; do\n        app_port=$((${SERVER_PORT} + 101 * $((i - 1))))\n        hz_port=$((${HZ_PORT} + i - 1))\n        echo \"starting ${APP_CONTAINER_NAME}_$app_port at app port: $app_port, hz port: $hz_port\"\n        docker run -d --rm \\\n        -p ${app_port}:${SERVER_PORT} \\\n        -p ${hz_port}:${HZ_PORT} \\\n        -v ${BUILD_DIR}/bin/bigben.yaml:${APP_ROOT}/bigben.yaml \\\n        -v ${BUILD_DIR}/configs/overrides.yaml:${APP_ROOT}/overrides.yaml \\\n        -v 
${BUILD_DIR}/configs/log4j.xml:${APP_ROOT}/log4j.xml \\\n        -v ${LOGS_DIR}:${APP_ROOT}/logs \\\n        -e HOST_IP=\"${HOST_IP}\" \\\n        -e CASSANDRA_SEED_IPS=\"${CASSANDRA_SEED_IPS}\" \\\n        -e HZ_MEMBER_IPS=\"${HZ_MEMBER_IPS}\" \\\n        -e JAVA_OPTS=\"${JAVA_OPTS} -Dbigben.configs=uri://${APP_ROOT}/overrides.yaml,uri://${APP_ROOT}/bigben.yaml \\\n        -Dapp.server.port=${SERVER_PORT} \\\n        -Dbigben.log.file=${APP_ROOT}/logs/bigben_app_${app_port}.log \\\n        -Dbigben.log.config=${APP_ROOT} \\\n        -Dhazelcast.local.publicAddress=${HOST_IP}:${hz_port}\" \\\n        --name \"${APP_CONTAINER_NAME}_$app_port\" sandeepmalik/bigben:1\n        let i=i+1\n    done\n    echo \"waiting for app servers to boot up\"\n    i=1\n    while [[  ${i} -lt $(($NUM_INSTANCES + 1)) ]]; do\n        app_server_docker_ip=`docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' \"${APP_CONTAINER_NAME}_$app_port\"`\n        echo \"waiting for app server ${APP_CONTAINER_NAME}_$app_port, docker ip: $app_server_docker_ip\"\n        docker run --rm dadarek/wait-for-dependencies ${app_server_docker_ip}:${SERVER_PORT}\n        let i=i+1\n    done\n}\n\nif [[ $1 == \"start\" ]]; then\n    start\nelif [[ $1 == \"stop\" ]]; then\n    stop\nelse\n    stop\n    start\nfi\n"
  },
  {
    "path": "build/docker/cassandra_run.sh",
    "content": "#!/usr/bin/env bash\nset -e\nCASSANDRA_CONTAINER_NAME=${CASSANDRA_CONTAINER_NAME:-bigben_cassandra}\nCASSANDRA_PORT=${CASSANDRA_PORT:-9042}\nCASSANDRA_GOSSIP_PORT=${CASSANDRA_GOSSIP_PORT:-7000}\nHOST_IP=${HOST_IP:-`ifconfig | grep -Eo 'inet (addr:)?([0-9]*\\.){3}[0-9]*' | grep -Eo '([0-9]*\\.){3}[0-9]*' | grep -v '127.0.0.1'`}\necho \"determined host ip: $HOST_IP\"\necho \"stopping ${CASSANDRA_CONTAINER_NAME}, if running\"\ndocker stop ${CASSANDRA_CONTAINER_NAME} || true\necho \"starting ${CASSANDRA_CONTAINER_NAME}\"\ndocker run -d --rm \\\n-p ${CASSANDRA_PORT}:${CASSANDRA_PORT} \\\n-e CASSANDRA_BROADCAST_ADDRESS=${HOST_IP} \\\n-p ${CASSANDRA_GOSSIP_PORT}:${CASSANDRA_GOSSIP_PORT} \\\n-v ${PWD}/../bin/bigben-schema.cql:/tmp/bigben-schema.cql \\\n--name ${CASSANDRA_CONTAINER_NAME} cassandra\nCASSANDRA_DOCKER_IP=`docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' ${CASSANDRA_CONTAINER_NAME}`\necho \"${CASSANDRA_CONTAINER_NAME} docker ip: ${CASSANDRA_DOCKER_IP}\"\necho \"waiting for ${CASSANDRA_CONTAINER_NAME} to boot up\"\ndocker run --rm dadarek/wait-for-dependencies ${CASSANDRA_DOCKER_IP}:${CASSANDRA_PORT}\necho \"creating bigben schema\"\ndocker exec -it ${CASSANDRA_CONTAINER_NAME} cqlsh -f /tmp/bigben-schema.cql"
  },
  {
    "path": "build/docker/cleanup.sh",
    "content": "#!/usr/bin/env bash\ndocker stop $(docker ps -a -q)\ndocker rm $(docker ps -a -q)"
  },
  {
    "path": "build/docker/deploy.sh",
    "content": "#!/usr/bin/env bash\nset -e\n./docker_build.sh\ndocker push sandeepmalik/bigben:1"
  },
  {
    "path": "build/docker/docker-compose.yml",
    "content": "version: '3'\n\nservices:\n  cassandra:\n    image: cassandra:3\n    container_name: cassandra\n    hostname: cassandra\n    restart: on-failure\n    volumes:\n      - ../cassandra/src/main/resources/bigben-schema.cql:/tmp/bigben-schema.cql\n\n  bigben:\n    image: sandeepmalik/bigben:1\n    hostname: bigben\n    container_name: bigben\n    volumes:\n      - ../app/src/main/resources/bigben.yaml:/dist/bigben-config.yaml\n      - ./configs/overrides.yaml:/dist/bigben-overrides.yaml\n      - ./configs/log4j.xml:/dist/log4j-overrides.xml\n      - ./configs/hz.xml:/dist/hz.xml\n\n  setup_cassandra:\n    image: dadarek/wait-for-dependencies\n    container_name: setup_cassandra\n    depends_on:\n      - cassandra\n    command: cassandra:9042"
  },
  {
    "path": "build/docker/docker_build.sh",
    "content": "#!/usr/bin/env bash\nset -e\n../exec/build.sh\ncd ../..\ndocker build -f build/docker/Dockerfile -t sandeepmalik/bigben:1 .\ncd build/docker"
  },
  {
    "path": "build/docker/single_node_run.sh",
    "content": "#!/usr/bin/env bash\nset -e\n./cassandra_run.sh\nexport NUM_INSTANCES=1\n./app_run.sh"
  },
  {
    "path": "build/docker/start.sh",
    "content": "#!/bin/sh\nDEFAULT_JAVA_OPTS=\"-server -XX:+UnlockExperimentalVMOptions -XX:InitialRAMFraction=2 -XX:MinRAMFraction=2 -XX:+UseG1GC -XX:MaxGCPauseMillis=100 -XX:+UseStringDeduplication\"\nif [[ \"x${JAVA_OPTS}\" != \"x\" ]]; then export JAVA_OPTS=\"${DEFAULT_JAVA_OPTS} ${JAVA_OPTS}\"; else export JAVA_OPTS=\"${DEFAULT_JAVA_OPTS}\"; fi\necho \"using JAVA_OPTS: ${JAVA_OPTS}\"\njava ${JAVA_OPTS} -jar bigben.jar"
  },
  {
    "path": "build/exec/app_run.sh",
    "content": "#!/usr/bin/env bash\n\nexport HOST_IP=${HOST_IP:-`ifconfig | grep -Eo 'inet (addr:)?([0-9]*\\.){3}[0-9]*' | grep -Eo '([0-9]*\\.){3}[0-9]*' | grep -v '127.0.0.1'`}\nexport SERVER_PORT=${SERVER_PORT:-8080}\nAPP_ROOT=${PWD}/../configs\nexport HZ_MEMBER_IPS=${HZ_MEMBER_IPS:-${HOST_IP}}\nexport CASSANDRA_SEED_IPS=${CASSANDRA_SEED_IPS:-${HOST_IP}}\nexport LOGS_DIR=${LOGS_DIR:-${APP_ROOT}/../../../bigben_logs}\n\nNUM_INSTANCES=${NUM_INSTANCES:-1}\nHZ_PORT=${HZ_PORT:-5701}\n\necho HOST_IP: ${HOST_IP}, SERVER_PORT: ${SERVER_PORT}, \\\nHZ_MEMBER_IPS: ${HZ_MEMBER_IPS}, CASSANDRA_SEED_IPS: ${CASSANDRA_SEED_IPS}, \\\nHZ_PORT: ${HZ_PORT}, NUM_INSTANCES: ${NUM_INSTANCES}\n\nDEFAULT_JAVA_OPTS=\"-server -XX:+UnlockExperimentalVMOptions -XX:InitialRAMFraction=2 -XX:MinRAMFraction=2 -XX:+UseG1GC -XX:MaxGCPauseMillis=100 -XX:+UseStringDeduplication\"\nJAVA_OPTS=${JAVA_OPTS}\necho \"using JAVA_OPTS: ${JAVA_OPTS}\"\n\nif [[ \"x${JAVA_OPTS}\" != \"x\" ]]; then\n    export JAVA_OPTS=\"${DEFAULT_JAVA_OPTS} ${JAVA_OPTS}\"\nelse\n    export JAVA_OPTS=\"${DEFAULT_JAVA_OPTS}\"\nfi\n\necho \"starting ${NUM_INSTANCES} app node(s)\"\n\ni=1\nwhile [[  ${i} -lt $(($NUM_INSTANCES + 1)) ]]; do\n    app_port=$((${SERVER_PORT} + 101 * $((i - 1))))\n    hz_port=$((${HZ_PORT} + i - 1))\n    echo \"starting node $i at app port: $app_port, hz port: $hz_port, logs: ${LOGS_DIR}/bigben_app_${app_port}.log\"\n    LOG_FILE=\"${LOGS_DIR}/bigben_app_${app_port}.log\"\n    java ${JAVA_OPTS} \\\n        -Dbigben.log.config=${APP_ROOT}/log4j.xml \\\n        -Dbigben.log.file=${LOG_FILE} \\\n        -Dapp.server.port=${app_port} \\\n        -Dbigben.configs=\"uri://${APP_ROOT}/overrides.yaml,uri://${APP_ROOT}/../bin/bigben.yaml\" \\\n        -Dhz.network.port=${hz_port} \\\n        -jar ../bin/bigben.jar > /dev/null &\n    if [[ ${NUM_INSTANCES} == 1 ]]; then\n       tail -f ${LOG_FILE}\n    fi\n    let i=i+1\ndone"
  },
  {
    "path": "build/exec/build.sh",
    "content": "#!/usr/bin/env bash\nset -e\ncd ../..\nmvn clean install\nrm -rf build/bin || true\nmkdir build/bin\ncp app/target/bigben.jar build/bin/\ncp cassandra/src/main/resources/bigben-schema.cql ./build/bin/bigben-schema.cql\ncp app/src/main/resources/bigben.yaml ./build/bin/bigben.yaml\n"
  },
  {
    "path": "build/exec/cleanup.sh",
    "content": "#!/usr/bin/env bash\nset -e\nps aux | grep bigben.jar | grep -v grep | awk '{print $2}' | xargs kill -9"
  },
  {
    "path": "cassandra/LICENSE.txt",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2018 Sandeep Malik\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "cassandra/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n    <parent>\n        <artifactId>bigben</artifactId>\n        <groupId>com.walmartlabs.bigben</groupId>\n        <version>1.0.7-SNAPSHOT</version>\n    </parent>\n\n    <artifactId>bigben-cassandra</artifactId>\n    <packaging>takari-jar</packaging>\n    <name>BigBen:cassandra</name>\n\n    <properties>\n        <cassandra-driver.version>3.2.0</cassandra-driver.version>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.walmartlabs.bigben</groupId>\n            <artifactId>bigben-lib</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.datastax.cassandra</groupId>\n            <artifactId>cassandra-driver-core</artifactId>\n            <version>${cassandra-driver.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>com.datastax.cassandra</groupId>\n            <artifactId>cassandra-driver-mapping</artifactId>\n            <version>${cassandra-driver.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>org.testng</groupId>\n            <artifactId>testng</artifactId>\n            <scope>test</scope>\n        </dependency>\n    </dependencies>\n\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.jetbrains.kotlin</groupId>\n                <artifactId>kotlin-maven-plugin</artifactId>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-surefire-plugin</artifactId>\n                <configuration>\n                    <systemPropertyVariables>\n                        
<buildDirectory>${project.build.directory}</buildDirectory>\n                    </systemPropertyVariables>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n</project>"
  },
  {
    "path": "cassandra/src/main/kotlin/com/walmartlabs/bigben/providers/domain/cassandra/CassandraModule.kt",
    "content": "/*-\n * #%L\n * BigBen:cassandra\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.providers.domain.cassandra\n\nimport com.datastax.driver.core.*\nimport com.datastax.driver.core.HostDistance.LOCAL\nimport com.datastax.driver.core.HostDistance.REMOTE\nimport com.datastax.driver.core.policies.*\nimport com.datastax.driver.mapping.Mapper\nimport com.datastax.driver.mapping.Mapper.Option.consistencyLevel\nimport com.datastax.driver.mapping.Mapper.Option.saveNullFields\nimport com.datastax.driver.mapping.MappingManager\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.entities.*\nimport com.walmartlabs.bigben.extns.nowUTC\nimport com.walmartlabs.bigben.utils.commons.Module\nimport com.walmartlabs.bigben.utils.commons.ModuleRegistry\nimport com.walmartlabs.bigben.utils.commons.Props.map\nimport com.walmartlabs.bigben.utils.commons.Props.string\nimport com.walmartlabs.bigben.utils.fromJson\nimport com.walmartlabs.bigben.utils.json\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.transform\nimport java.time.ZonedDateTime\n\n/**\n * Created by smalik3 on 3/2/18\n */\nopen class CassandraModule<T : Any> : EntityProvider<T>, ClusterFactory, EventLoader, Module {\n\n    companion object {\n        
private val l = logger<CassandraModule<*>>()\n        private val cluster: Cluster\n        val mappingManager: MappingManager\n        private val loaderQuery: PreparedStatement\n        private val kvAllQuery: PreparedStatement\n        private val session: Session\n\n        private val clusterConfig = ClusterConfig::class.java.fromJson(map(\"cassandra.cluster\").json())\n        private val writeConsistency = consistencyLevel(clusterConfig.writeConsistency)\n        private val readConsistency = consistencyLevel(clusterConfig.readConsistency)\n\n        init {\n            l.info(\"initialing the Cassandra module\")\n            cluster = (Class.forName(string(\"domain.cluster.factory.class\", CassandraModule::class.java.name)).newInstance() as ClusterFactory).create()\n            session = cluster.connect(string(\"cassandra.keyspace\"))\n            mappingManager = MappingManager(session)\n            loaderQuery = mappingManager.session.prepare(\"SELECT * FROM ${session.loggedKeyspace}.events WHERE bucket_id = ? AND shard = ? AND (event_time, id) > (?,?) 
LIMIT ?;\")\n            kvAllQuery = mappingManager.session.prepare(\"SELECT * FROM ${session.loggedKeyspace}.kv_table WHERE key = ?;\")\n        }\n    }\n\n    override fun init(registry: ModuleRegistry) {\n    }\n\n    @Suppress(\"UNCHECKED_CAST\")\n    override fun selector(type: Class<T>): T {\n        return when (type) {\n            Event::class.java -> EventC() as T\n            Bucket::class.java -> BucketC() as T\n            EventLookup::class.java -> EventLookupC() as T\n            KV::class.java -> KVC() as T\n            else -> throw IllegalArgumentException(\"unknown entity $type\")\n        }\n    }\n\n    override fun raw(selector: T) = selector\n\n    override fun kvs(selector: KV): ListenableFuture<List<KV>> {\n        require(selector.key != null) { \"key must be provided\" }\n        return session.executeAsync(kvAllQuery.bind(selector.key)).transform {\n            it?.run { mappingManager.mapper(KVC::class.java).map(this).map { it } } ?: emptyList()\n        }\n    }\n\n    override fun fetch(selector: T): ListenableFuture<T?> {\n        return mappingManager.mapper(selector::class.java).let {\n            when (selector) {\n                is EventC -> {\n                    require(\n                        selector.eventTime != null && selector.id != null &&\n                                selector.shard != null && selector.shard!! 
>= 0\n                    ) { \"event keys not provided: $selector\" }\n                    it.getAsync(selector.bucketId, selector.shard, selector.eventTime, selector.id, readConsistency).transform { it }\n                }\n                is BucketC -> {\n                    require(selector.bucketId != null) { \"bucket id not provided: $selector\" }\n                    it.getAsync(selector.bucketId, readConsistency).transform { it }\n                }\n                is EventLookupC -> {\n                    require(selector.tenant != null && selector.xrefId != null) { \"look up keys not provided: $selector\" }\n                    it.getAsync(selector.tenant, selector.xrefId, readConsistency).transform { it }\n                }\n                is KVC -> {\n                    require(selector.key != null && selector.column != null) { \"kv keys not provided: $selector\" }\n                    it.getAsync(selector.key, selector.column, readConsistency).transform { it }\n                }\n                else -> throw IllegalArgumentException(\"unknown selector: $selector\")\n            }\n        }.apply {\n            transform { if (l.isDebugEnabled) l.debug(\"fetched entity: {}\", it) }\n        }\n    }\n\n    override fun save(selector: T): ListenableFuture<T> {\n        return mappingManager.mapper(selector::class.java).let {\n            @Suppress(\"UNCHECKED_CAST\")\n            val m = it as Mapper<Any>\n            when (selector) {\n                is EventC -> {\n                    require(\n                        selector.eventTime != null && selector.id != null && selector.bucketId != null &&\n                                selector.shard != null && selector.shard!! 
>= 0\n                    ) { \"event keys not provided: $selector\" }\n                }\n                is BucketC -> {\n                    require(selector.bucketId != null) { \"bucket id not provided: $selector\" }\n                }\n                is EventLookupC -> {\n                    require(selector.tenant != null && selector.xrefId != null) { \"look up keys not provided: $selector\" }\n                    selector.lastModified = nowUTC()\n                }\n                is KVC -> {\n                    require(selector.key != null && selector.column != null) { \"kv keys not provided: $selector\" }\n                    selector.lastModified = nowUTC()\n                }\n                else -> throw IllegalArgumentException(\"unknown selector: $selector\")\n            }\n            if (l.isDebugEnabled) l.debug(\"saving entity {}\", selector)\n            m.saveAsync(selector, saveNullFields(false), writeConsistency).transform { _ -> if (l.isDebugEnabled) l.debug(\"saved entity {}\", selector); selector }\n        }\n    }\n\n    override fun remove(selector: T): ListenableFuture<T> {\n        return mappingManager.mapper(selector::class.java).let {\n            @Suppress(\"UNCHECKED_CAST\")\n            val m = it as Mapper<Any>\n            when (selector) {\n                is EventC -> {\n                    require(\n                        selector.eventTime != null && selector.id != null &&\n                                selector.shard != null && selector.shard!! 
>= 0\n                    ) { \"event keys not provided: $selector\" }\n                }\n                is BucketC -> {\n                    require(selector.bucketId != null) { \"bucket id not provided: $selector\" }\n                }\n                is EventLookupC -> {\n                    require(selector.tenant != null && selector.xrefId != null) { \"look up keys not provided: $selector\" }\n                }\n                is KVC -> {\n                    require(selector.key != null && selector.column != null) { \"kv keys not provided: $selector\" }\n                }\n                else -> throw IllegalArgumentException(\"unknown selector: $selector\")\n            }\n            if (l.isDebugEnabled) l.debug(\"deleting entity: {}\", selector)\n            m.deleteAsync(selector, writeConsistency).transform { _ -> if (l.isDebugEnabled) l.debug(\"deleted entity {}\", selector); selector }\n        }\n    }\n\n    override fun create(): Cluster {\n        return Cluster.builder()\n            .withCodecRegistry(CodecRegistry().register(EnumCodec(EventStatus.values().toSet())).register(ZdtCodec()))\n            .withClusterName(clusterConfig.clusterName)\n            .withPort(clusterConfig.port)\n            .also { clusterConfig.compression?.run { it.withCompression(ProtocolOptions.Compression.valueOf(this)) } }\n            .withRetryPolicy(if (clusterConfig.downgradingConsistency) DowngradingConsistencyRetryPolicy.INSTANCE else DefaultRetryPolicy.INSTANCE)\n            .also {\n                clusterConfig.localDataCenter?.run {\n                    it.withLoadBalancingPolicy(TokenAwarePolicy(DCAwareRoundRobinPolicy.builder().withLocalDc(this).withUsedHostsPerRemoteDc(0).build()))\n                }\n            }\n            .withReconnectionPolicy(ConstantReconnectionPolicy(clusterConfig.reconnectPeriod))\n            .withSocketOptions(SocketOptions().apply {\n                connectTimeoutMillis = clusterConfig.connectionTimeOut\n            
    readTimeoutMillis = clusterConfig.readTimeout\n                keepAlive = clusterConfig.keepTCPConnectionAlive\n            })\n            .withPoolingOptions(PoolingOptions().apply {\n                clusterConfig.apply {\n                    setConnectionsPerHost(LOCAL, coreConnectionsPerLocalHost, maxConnectionsPerLocalHost)\n                    setConnectionsPerHost(REMOTE, coreConnectionsPerRemoteHost, maxConnectionsPerRemoteHost)\n                }\n                heartbeatIntervalSeconds = 60\n            })\n            .also { clusterConfig.username?.run { it.withCredentials(this, clusterConfig.password) } }\n            .addContactPoints(*clusterConfig.contactPoints.split(\",\").toTypedArray())\n            .apply { decorate(this) }\n            .build()\n    }\n\n    protected open fun decorate(builder: Cluster.Builder) {\n    }\n\n    override fun unwrap() = session\n\n    override fun load(bucketId: ZonedDateTime, shard: Int, fetchSize: Int, eventTime: ZonedDateTime, eventId: String, context: Any?): ListenableFuture<Pair<Any?, List<Event>>> {\n        return mappingManager.session.executeAsync(loaderQuery.bind(bucketId, shard, eventTime, eventId, fetchSize)).transform { null to mappingManager.mapper(EventC::class.java).map(it!!).toList() }\n    }\n}\n"
  },
  {
    "path": "cassandra/src/main/kotlin/com/walmartlabs/bigben/providers/domain/cassandra/ClusterConfig.kt",
    "content": "/*-\n * #%L\n * BigBen:cassandra\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.providers.domain.cassandra\n\nimport com.datastax.driver.core.Cluster\nimport com.datastax.driver.core.ConsistencyLevel\nimport com.datastax.driver.core.ConsistencyLevel.LOCAL_ONE\nimport com.datastax.driver.core.SocketOptions.DEFAULT_CONNECT_TIMEOUT_MILLIS\nimport com.datastax.driver.core.SocketOptions.DEFAULT_READ_TIMEOUT_MILLIS\n\n/**\n * Created by smalik3 on 3/2/18\n */\ndata class ClusterConfig(\n    val clusterName: String = \"bigben\",\n    val contactPoints: String,\n    val port: Int = 9042,\n    val localDataCenter: String?,\n\n    val compression: String?,\n    val keepTCPConnectionAlive: Boolean = true,\n\n    val coreConnectionsPerLocalHost: Int = 1,\n    val maxConnectionsPerLocalHost: Int = 1,\n    val coreConnectionsPerRemoteHost: Int = 1,\n    val maxConnectionsPerRemoteHost: Int = 1,\n    val maxRequestsPerLocalConnection: Int = 32768,\n    val maxRequestsPerRemoteConnection: Int = 2048,\n    val newLocalConnectionThreshold: Int = 3000,\n    val newRemoteConnectionThreshold: Int = 400,\n    val poolTimeoutMillis: Int = 0,\n\n    val connectionTimeOut: Int = DEFAULT_CONNECT_TIMEOUT_MILLIS,\n    val readTimeout: Int = DEFAULT_READ_TIMEOUT_MILLIS,\n    val 
reconnectPeriod: Long = 5L,\n\n    val username: String?,\n    val password: String?,\n    val downgradingConsistency: Boolean = false,\n\n    val writeConsistency: ConsistencyLevel = LOCAL_ONE,\n    val readConsistency: ConsistencyLevel = LOCAL_ONE\n)\n\ninterface ClusterFactory {\n    fun create(): Cluster\n}\n"
  },
  {
    "path": "cassandra/src/main/kotlin/com/walmartlabs/bigben/providers/domain/cassandra/Entities.kt",
    "content": "/*-\n * #%L\n * BigBen:cassandra\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.providers.domain.cassandra\n\nimport com.datastax.driver.mapping.annotations.*\nimport com.hazelcast.nio.ObjectDataInput\nimport com.hazelcast.nio.ObjectDataOutput\nimport com.walmartlabs.bigben.entities.*\nimport com.walmartlabs.bigben.extns.utc\nimport com.walmartlabs.bigben.hz.HzObjectFactory.Companion.BIGBEN_FACTORY_ID\nimport com.walmartlabs.bigben.hz.HzObjectFactory.ObjectId.BUCKET\nimport java.time.ZonedDateTime\nimport java.util.*\n\n/**\n * Created by smalik3 on 2/26/18\n */\n@Table(name = \"buckets\")\ndata class BucketC(@PartitionKey @Column(name = \"id\") override var bucketId: ZonedDateTime? = null,\n                   override var status: EventStatus? = null,\n                   override var count: Long? = null,\n                   @Column(name = \"processed_at\") override var processedAt: ZonedDateTime? = null,\n                   @Column(name = \"modified_at\") override var updatedAt: ZonedDateTime? = null,\n                   @Column(name = \"failed_shards\", codec = FailedShardsCodec::class) override var failedShards: Set<Int>? 
= null) : Bucket {\n    @Transient\n    override fun getFactoryId() = BIGBEN_FACTORY_ID\n\n    @Transient\n    override fun getId() = BUCKET.ordinal\n\n    override fun writeData(out: ObjectDataOutput) {\n        BitSet(4).apply {\n            set(0, bucketId != null)\n            set(1, status != null)\n            set(2, count != null)\n            set(3, processedAt != null)\n            set(4, updatedAt != null)\n        }.also { out.writeByteArray(it.toByteArray()) }.apply {\n            if (get(0)) out.writeLong(bucketId!!.toInstant().toEpochMilli())\n            if (get(1)) out.writeByte(status!!.ordinal)\n            if (get(2)) out.writeLong(count!!)\n            if (get(3)) out.writeLong(processedAt!!.toInstant().toEpochMilli())\n            if (get(4)) out.writeLong(updatedAt!!.toInstant().toEpochMilli())\n        }\n    }\n\n    override fun readData(ins: ObjectDataInput) {\n        BitSet.valueOf(ins.readByteArray()).apply {\n            if (get(0)) bucketId = utc(ins.readLong())\n            if (get(1)) status = EventStatus.values()[ins.readByte().toInt()]\n            if (get(2)) count = ins.readLong()\n            if (get(3)) processedAt = utc(ins.readLong())\n            if (get(4)) updatedAt = utc(ins.readLong())\n        }\n    }\n}\n\n@Table(name = \"events\")\ndata class EventC(@ClusteringColumn @Column(name = \"event_time\") override var eventTime: ZonedDateTime? = null,\n                  @ClusteringColumn(1) override var id: String? = null,\n                  @PartitionKey @Column(name = \"bucket_id\") override var bucketId: ZonedDateTime? = null,\n                  @PartitionKey(1) override var shard: Int? = null,\n                  override var status: EventStatus? = null,\n                  override var error: String? = null,\n                  override var tenant: String? = null,\n                  @Column(name = \"xref_id\") override var xrefId: String? 
= null,\n                  @Column(name = \"processed_at\") override var processedAt: ZonedDateTime? = null,\n                  override var payload: String? = null,\n                  @Transient override var eventResponse: EventResponse? = null,\n                  @Transient override var deliveryOption: EventDeliveryOption? = null) : Event\n\n@Table(name = \"lookups\")\ndata class EventLookupC(@PartitionKey override var tenant: String? = null,\n                        @PartitionKey(1) @Column(name = \"xref_id\") override var xrefId: String? = null,\n                        @Column(name = \"bucket_id\") override var bucketId: ZonedDateTime? = null,\n                        override var shard: Int? = null,\n                        @Column(name = \"event_time\") override var eventTime: ZonedDateTime? = null,\n                        @Column(name = \"event_id\") override var eventId: String? = null,\n                        override var payload: String? = null,\n                        @Column(name = \"l_m\") var lastModified: ZonedDateTime? = null) : EventLookup\n\n@Table(name = \"kv_table\")\ndata class KVC(@PartitionKey override var key: String? = null,\n               @ClusteringColumn override var column: String? = null,\n               override var value: String? = null,\n               @Column(name = \"l_m\") var lastModified: ZonedDateTime? = null\n) : KV\n"
  },
  {
    "path": "cassandra/src/main/kotlin/com/walmartlabs/bigben/providers/domain/cassandra/codecs.kt",
    "content": "/*-\n * #%L\n * BigBen:cassandra\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.providers.domain.cassandra\n\nimport com.datastax.driver.core.DataType\nimport com.datastax.driver.core.ProtocolVersion\nimport com.datastax.driver.core.TypeCodec\nimport com.google.common.reflect.TypeToken\nimport com.walmartlabs.bigben.extns.utc\nimport com.walmartlabs.bigben.utils.json\nimport com.walmartlabs.bigben.utils.typeRefJson\nimport java.nio.ByteBuffer\nimport java.time.ZonedDateTime\n\n/**\n * Created by smalik3 on 3/2/18\n */\nclass EnumCodec<T : Enum<T>>(values: Set<T>) : TypeCodec<T>(DataType.varchar(), @Suppress(\"UNCHECKED_CAST\") (values.first()::class.java as Class<T>)) {\n    private val forward = values.associate { it.name to it }\n\n    override fun format(value: T) = value.name\n    override fun parse(value: String?) = value?.let { forward[it] }\n\n    override fun serialize(value: T?, protocolVersion: ProtocolVersion?) = value?.let { ByteBuffer.wrap(format(it).toByteArray()) }\n    override fun deserialize(bytes: ByteBuffer?, protocolVersion: ProtocolVersion?) 
= bytes?.let { parse(String(bytes.duplicate().array())) }\n}\n\nclass ZdtCodec : TypeCodec<ZonedDateTime>(DataType.timestamp(), ZonedDateTime::class.java) {\n    override fun format(value: ZonedDateTime?) = value?.toInstant()?.toEpochMilli()?.toString()\n    override fun parse(value: String?): ZonedDateTime? = value?.let { utc(it.toLong()) }\n\n    override fun serialize(value: ZonedDateTime?, protocolVersion: ProtocolVersion?) = value?.let { ByteBuffer.allocate(8).apply { asLongBuffer().put(value.toInstant().toEpochMilli()) } }\n    override fun deserialize(bytes: ByteBuffer?, protocolVersion: ProtocolVersion?) = bytes?.let { utc(bytes.duplicate().asLongBuffer().get()) }\n}\n\nclass FailedShardsCodec : TypeCodec<Set<Int>>(DataType.text(), object : TypeToken<Set<Int>>() {}) {\n    override fun format(value: Set<Int>?) = value?.json()\n    override fun parse(value: String?): Set<Int>? = value?.let { typeRefJson<Set<Int>>(it) }\n\n    override fun serialize(value: Set<Int>?, protocolVersion: ProtocolVersion?) = value?.let { ByteBuffer.wrap(it.json().toByteArray()) }\n    override fun deserialize(bytes: ByteBuffer?, protocolVersion: ProtocolVersion?) = bytes?.let { typeRefJson<Set<Int>>(String(it.array())) }\n}\n"
  },
  {
    "path": "cassandra/src/main/resources/bigben-schema.cql",
    "content": "-- DROP KEYSPACE IF EXISTS bigben;\n\nCREATE KEYSPACE IF NOT EXISTS bigben WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 };\n\n-- DROP TABLE IF EXISTS bigben.buckets;\n\nCREATE TABLE IF NOT EXISTS bigben.buckets (\n\tid timestamp PRIMARY KEY,\n\tcount bigint,\n\tfailed_shards text,\n\tmodified_at timestamp,\n\tprocessed_at timestamp,\n\tstatus text\n);\n\n-- DROP TABLE IF EXISTS bigben.lookups;\n\nCREATE TABLE IF NOT EXISTS bigben.lookups (\n\ttenant text,\n\txref_id text,\n\tbucket_id timestamp,\n\tevent_id text,\n\tevent_time timestamp,\n\tl_m timestamp,\n\tpayload text,\n\tshard int,\n\tPRIMARY KEY ((tenant, xref_id))\n);\n\n-- DROP TABLE IF EXISTS bigben.events;\n\nCREATE TABLE IF NOT EXISTS bigben.events (\n\tbucket_id timestamp,\n\tshard int,\n\tevent_time timestamp,\n\tid text,\n\terror text,\n\tpayload text,\n\tprocessed_at timestamp,\n\tstatus text,\n\ttenant text,\n\txref_id text,\n\tPRIMARY KEY ((bucket_id, shard), event_time, id)\n) WITH CLUSTERING ORDER BY (event_time ASC, id ASC);\n\n-- DROP TABLE IF EXISTS bigben.kv_table;\n\nCREATE TABLE IF NOT EXISTS bigben.kv_table (\n\tkey text,\n\tcolumn text,\n\tl_m timestamp,\n\tvalue text,\n\tPRIMARY KEY (key, column)\n) WITH CLUSTERING ORDER BY (column ASC);\n\n"
  },
  {
    "path": "cassandra/src/test/kotlin/com/walmartlabs/bigben/cassandra/tests/IntegrationTests.kt",
    "content": "/*-\n * #%L\n * BigBen:cassandra\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.cassandra.tests\n\nimport com.datastax.driver.core.Session\nimport com.walmartlabs.bigben.BigBen\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.entities.*\nimport com.walmartlabs.bigben.extns.bucket\nimport com.walmartlabs.bigben.extns.fetch\nimport com.walmartlabs.bigben.extns.nowUTC\nimport com.walmartlabs.bigben.extns.save\nimport org.testng.annotations.BeforeMethod\nimport org.testng.annotations.Test\nimport kotlin.test.assertEquals\nimport kotlin.test.assertTrue\n\n/**\n * Created by smalik3 on 4/12/18\n */\nclass IntegrationTests {\n\n    companion object {\n        init {\n            System.setProperty(\"bigben.configs\", \"file://bigben-test.yaml\")\n            BigBen.init()\n        }\n    }\n\n    @BeforeMethod\n    private fun `clean up db`() {\n        println(\"cleaning up the db\")\n        try {\n            (module<EntityProvider<Any>>().unwrap() as Session).apply {\n                execute(\"truncate bigben.events;\")\n                execute(\"truncate bigben.lookups;\")\n                execute(\"truncate bigben.buckets;\")\n                execute(\"truncate bigben.kv_table;\")\n            }\n        } catch (e: Throwable) {\n      
      e.printStackTrace()\n            throw e\n        }\n    }\n\n    @Test\n    fun `test bucket`() {\n        val nowUTC = nowUTC().bucket()\n        save<Bucket> { it.bucketId = nowUTC; it.count = 10 }.get()\n        val bucket = fetch<Bucket> { it.bucketId = nowUTC }.get()!!\n        assertEquals(bucket.count, 10)\n    }\n\n    @Test\n    fun `test event loader`() {\n        val bucket = nowUTC().bucket()\n        val events = (0..99).map { i ->\n            save<Event> {\n                it.bucketId = bucket; it.shard = i / 10; it.eventTime = bucket.plusSeconds(10)\n                it.id = \"e_$i\"; it.status = EventStatus.UN_PROCESSED\n            }.get()\n        }.associate { \"${it.eventTime}-${it.id}\" to it }.toMutableMap()\n        val fetchSize = 20\n        (0..10).forEach {\n            var l = module<EventLoader>().load(bucket, it, fetchSize).get()\n            while (l.second.isNotEmpty()) {\n                l.second.forEach {\n                    assertEquals(events[\"${it.eventTime}-${it.id}\"], it)\n                    events.remove(\"${it.eventTime}-${it.id}\")\n                }\n                l =\n                    module<EventLoader>().load(bucket, it, fetchSize, l.second.last().eventTime!!, l.second.last().id!!, l.first)\n                        .get()\n            }\n        }\n        assertTrue { events.isEmpty() }\n    }\n\n    @Test\n    fun `event added successfully`() {\n        val bucket = nowUTC().bucket()\n        save<Event> {\n            it.bucketId = bucket; it.shard = 1; it.eventTime = bucket.plusSeconds(10)\n            it.id = \"e1\"; it.status = EventStatus.UN_PROCESSED\n        }.get()\n        val event = fetch<Event> {\n            it.bucketId = bucket; it.shard = 1; it.eventTime = bucket.plusSeconds(10); it.id = \"e1\"\n        }.get()!!\n        assertEquals(event.status, EventStatus.UN_PROCESSED)\n    }\n}\n"
  },
  {
    "path": "cassandra/src/test/kotlin/com/walmartlabs/bigben/cassandra/tests/ORMTests.kt",
    "content": "/*-\n * #%L\n * BigBen:cassandra\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.cassandra.tests\n\nimport com.walmartlabs.bigben.BigBen\nimport com.walmartlabs.bigben.entities.EventStatus.PROCESSED\nimport com.walmartlabs.bigben.entities.KV\nimport com.walmartlabs.bigben.extns.*\nimport com.walmartlabs.bigben.providers.domain.cassandra.BucketC\nimport com.walmartlabs.bigben.providers.domain.cassandra.CassandraModule.Companion.mappingManager\nimport com.walmartlabs.bigben.providers.domain.cassandra.EventC\nimport com.walmartlabs.bigben.providers.domain.cassandra.EventLookupC\nimport org.testng.annotations.Test\nimport java.util.*\nimport java.util.concurrent.TimeUnit.MINUTES\nimport kotlin.test.assertEquals\nimport kotlin.test.assertNotNull\n\n/**\n * Created by smalik3 on 3/2/18\n */\nclass ORMTests {\n\n    companion object {\n        init {\n            System.setProperty(\"bigben.configs\", \"file://bigben-test.yaml\")\n            BigBen.init()\n        }\n    }\n\n    @Test\n    fun `test bucket orm`() {\n        val b = BucketC(nowUTC(), PROCESSED, 10, nowUTC(), nowUTC())\n        val mapper = mappingManager.mapper(BucketC::class.java)\n        mapper.save(b)\n        val newBucket = mapper[b.bucketId]\n        assertEquals(b, newBucket)\n    }\n\n    
@Test\n    fun `test event orm`() {\n        val eventTime = nowUTC()\n        val e = EventC(eventTime, UUID.randomUUID().toString(), eventTime.bucket(), 1, PROCESSED, null,\n                \"default\", processedAt = eventTime.plusSeconds(1), xrefId = \"xref_1\", payload = \"{payload}\")\n        val mapper = mappingManager.mapper(EventC::class.java)\n        mapper.save(e)\n        val newEventC = mapper[eventTime.bucket(), 1, eventTime, e.id]\n        assertEquals(e, newEventC)\n    }\n\n    @Test\n    fun `test event look up orm`() {\n        val el = EventLookupC(\"default\", UUID.randomUUID().toString(), nowUTC().bucket(), 2, nowUTC(), \"event_1\", \"payload1\")\n        val mapper = mappingManager.mapper(EventLookupC::class.java)\n        mapper.save(el)\n        val newEventLookupC = mapper[el.tenant, el.xrefId]\n        assertEquals(el, newEventLookupC)\n    }\n\n    @Test\n    fun `test kv`() {\n        val key = UUID.randomUUID().toString()\n        save<KV> { it.key = key; it.column = 1.toString(); it.value = \"Value1\" }.get(1, MINUTES)\n        save<KV> { it.key = key; it.column = 2.toString(); it.value = \"Value2\" }.get(1, MINUTES)\n        val kv = fetch<KV> { it.key = key; it.column = 1.toString() }.get(1, MINUTES)\n        assertNotNull(kv)\n        assertEquals(kv.value, \"Value1\")\n        val kvs = kvs { it.key = key }.get(1, MINUTES)\n        assertEquals(kvs.size, 2)\n        kvs.associate { it.column to it.value }.apply {\n            assertEquals(this[1.toString()], \"Value1\")\n            assertEquals(this[2.toString()], \"Value2\")\n        }\n    }\n}\n"
  },
  {
    "path": "cassandra/src/test/resources/bigben-test.yaml",
    "content": "# top level modules\nmodules:\n  - name: domain\n    class: com.walmartlabs.bigben.providers.domain.cassandra.CassandraModule\n  - name: processors\n    object: com.walmartlabs.bigben.processors.ProcessorRegistry\n  - name: hz\n    class: com.walmartlabs.bigben.utils.hz.Hz\n  - name: scheduler\n    object: com.walmartlabs.bigben.SchedulerModule\n  - name: events\n    object: com.walmartlabs.bigben.EventModule\n\n# hazelcast properties\nhz:\n  template: file://hz.template.xml\n  group:\n    name: bigben-dev\n    password: bigben-dev\n  network:\n    autoIncrementPort: true\n    members: 127.0.0.1\n    port: 5701\n  map:\n    store:\n      writeDelay: 30\n\n# cassandra related properties\ncassandra:\n  keyspace: bigben\n  cluster:\n    contactPoints: 127.0.0.1\n    clusterName: bigben-cluster\n    port: 9042\n    localDataCenter: null\n    coreConnectionsPerHost: 8\n    maxHostsPerConnection: 32768\n    keepTCPConnectionAlive: true\n    connectionTimeOut: 5000\n    readTimeout: 12000\n    reconnectPeriod: 5\n    username: null\n    password: null\n    downgradingConsistency: false\n    writeConsistency: \"LOCAL_QUORUM\"\n    readConsistency: \"LOCAL_QUORUM\"\n\n# system properties\ntask:\n  executor:\n    #retry.thread.count: 8\n    retry.time.units: SECONDS\n    delay: 1\n    max.retries: 3\n    backoff.multiplier: 2\n\nmessaging.producer.factory.class: com.walmartlabs.bigben.processors.NoOpMessageProducerFactory\ngeneric.future.max.get.time: 60\n\n# scheduler / event related properties\nevents:\n  scheduler.enabled: true\n  schedule.scan.interval.minutes: 1\n  num.shard.submitters: 8\n  receiver:\n    shard.size: 10\n    lapse.offset.minutes: 0\n    delete:\n      max.retries: 3\n      initial.delay: 1\n      backoff.multiplier: 1\n  submit:\n    initial.delay: 1\n    backoff.multiplier: 1\n    max.retries: 3\n  processor:\n    max.retries: 3\n    initial.delay: 1\n    backoff.multiplier: 2\n    eager.loading: true\n  tasks:\n    
max.events.in.memory: 100000\n    scheduler.worker.threads: 8\n\n# bucket manager / loader related properties\nbuckets:\n  backlog.check.limit: 30\n  background:\n    load.fetch.size: 10\n    load.wait.interval.seconds: 1\n  checkpoint:\n    interval: 60\n    interval.units: SECONDS\n\n# kafka related properties\nkafka:\n  producer:\n    config: # this is default kafka producer config, these values will be used if not supplied during the tenant registration\n      key.serializer: org.apache.kafka.common.serialization.StringSerializer\n      value.serializer: org.apache.kafka.common.serialization.StringSerializer\n      acks: \"1\"\n      buffer.memory: 32400\n      retries: 3\n"
  },
  {
    "path": "cassandra/src/test/resources/log4j.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!--\n  #%L\n  BigBen:app\n  =======================================\n  Copyright (C) 2016 - 2018 Walmart Inc.\n  =======================================\n  Licensed under the Apache License, Version 2.0 (the \"License\");\n  you may not use this file except in compliance with the License.\n  You may obtain a copy of the License at\n  \n       http://www.apache.org/licenses/LICENSE-2.0\n  \n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n  #L%\n  -->\n\n<!DOCTYPE log4j:configuration SYSTEM \"log4j.dtd\">\n<log4j:configuration debug=\"true\"\n                     xmlns:log4j='http://jakarta.apache.org/log4j/'\n                     xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n                     xsi:schemaLocation=\"http://jakarta.apache.org/log4j/ \">\n\n    <appender name=\"console\" class=\"org.apache.log4j.ConsoleAppender\">\n        <layout class=\"org.apache.log4j.PatternLayout\">\n            <param name=\"ConversionPattern\"\n                   value=\"%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%t] %c{1}:%L - %m%n\"/>\n        </layout>\n    </appender>\n\n    <logger name=\"com.walmartlabs.bigben\" additivity=\"false\">\n        <level value=\"DEBUG\"/>\n        <appender-ref ref=\"console\"/>\n    </logger>\n\n</log4j:configuration>\n"
  },
  {
    "path": "cassandra/src/test/resources/testng.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!--\n  #%L\n  BigBen:cassandra\n  =======================================\n  Copyright (C) 2016 - 2018 Walmart Inc.\n  =======================================\n  Licensed under the Apache License, Version 2.0 (the \"License\");\n  you may not use this file except in compliance with the License.\n  You may obtain a copy of the License at\n  \n       http://www.apache.org/licenses/LICENSE-2.0\n  \n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n  #L%\n  -->\n\n<!DOCTYPE suite SYSTEM \"http://testng.org/testng-1.0.dtd\">\n<suite name=\"bigben-domain-tests\">\n    <test name=\"domain-tests\" verbose=\"1\" parallel=\"false\"/>\n</suite>\n"
  },
  {
    "path": "commons/LICENSE.txt",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2018 Sandeep Malik\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "commons/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n    <parent>\n        <artifactId>bigben</artifactId>\n        <groupId>com.walmartlabs.bigben</groupId>\n        <version>1.0.7-SNAPSHOT</version>\n    </parent>\n\n    <artifactId>bigben-commons</artifactId>\n    <packaging>takari-jar</packaging>\n    <name>BigBen:commons</name>\n\n    <dependencies>\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-api</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>io.github.microutils</groupId>\n            <artifactId>kotlin-logging</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.google.guava</groupId>\n            <artifactId>guava</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.fasterxml.jackson.core</groupId>\n            <artifactId>jackson-databind</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.fasterxml.jackson.module</groupId>\n            <artifactId>jackson-module-kotlin</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-text</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.hazelcast</groupId>\n            <artifactId>hazelcast</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.jetbrains.kotlin</groupId>\n            <artifactId>kotlin-reflect</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.fasterxml.jackson.dataformat</groupId>\n            <artifactId>jackson-dataformat-yaml</artifactId>\n            <version>2.9.5</version>\n        
</dependency>\n        <dependency>\n            <groupId>org.testng</groupId>\n            <artifactId>testng</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-log4j12</artifactId>\n            <version>1.7.25</version>\n            <scope>test</scope>\n        </dependency>\n    </dependencies>\n\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.jetbrains.kotlin</groupId>\n                <artifactId>kotlin-maven-plugin</artifactId>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-surefire-plugin</artifactId>\n                <configuration>\n                    <systemPropertyVariables>\n                        <buildDirectory>${project.build.directory}</buildDirectory>\n                    </systemPropertyVariables>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n</project>"
  },
  {
    "path": "commons/src/main/kotlin/com/walmartlabs/bigben/utils/_extns.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.utils\n\nimport com.fasterxml.jackson.core.JsonGenerator\nimport com.fasterxml.jackson.core.JsonParser\nimport com.fasterxml.jackson.core.Version\nimport com.fasterxml.jackson.core.type.TypeReference\nimport com.fasterxml.jackson.databind.*\nimport com.fasterxml.jackson.databind.module.SimpleModule\nimport com.fasterxml.jackson.dataformat.yaml.YAMLFactory\nimport com.fasterxml.jackson.module.kotlin.KotlinModule\nimport com.google.common.base.Throwables\nimport mu.KotlinLogging\nimport org.slf4j.LoggerFactory\nimport java.time.ZonedDateTime\n\n/**\n * Created by smalik3 on 2/21/18\n */\ninline fun <reified T : Any> logger() = KotlinLogging.logger(unwrapCompanionClass(T::class.java).name)\n//LoggerFactory.getLogger(unwrapCompanionClass(T::class.java).name)!!\n\nfun logger(name: String) = LoggerFactory.getLogger(name)!!\n\nfun <T : Any> unwrapCompanionClass(ofClass: Class<T>): Class<*> {\n    return if (ofClass.enclosingClass != null && ofClass.enclosingClass.kotlin.isCompanion) {\n        ofClass.enclosingClass\n    } else {\n        ofClass\n    }\n}\n\nfun Throwable?.rootCause() = this?.let { Throwables.getRootCause(this) }\n\nfun Throwable?.stackTraceAsString() = this?.let { 
Throwables.getStackTraceAsString(this) }\n\nfun zdtModule() = SimpleModule(\"ZDT\", Version(1, 0, 0, null, null, null)).also {\n    it.addSerializer(ZonedDateTime::class.java, object : JsonSerializer<ZonedDateTime>() {\n        override fun serialize(p0: ZonedDateTime?, p1: JsonGenerator, p2: SerializerProvider) {\n            p0?.let { p1.writeString(it.toString()) } ?: p1.writeNull()\n        }\n    })\n    it.addDeserializer(ZonedDateTime::class.java, object : JsonDeserializer<ZonedDateTime>() {\n        override fun deserialize(jp: JsonParser, dc: DeserializationContext): ZonedDateTime? {\n            return jp.codec.readValue(jp, String::class.java)?.let { ZonedDateTime.parse(it) }\n        }\n    })\n}\n\ntypealias Json = Map<String, Any>\n\nval om = ObjectMapper().registerModule(KotlinModule()).registerModule(zdtModule())!!\n\nfun Any.json(): String = om.writeValueAsString(this)\nfun Any.yaml(): String = omYaml.writeValueAsString(this)\nfun <T> Class<T>.fromJson(s: String) = om.readValue(s, this)!!\nfun <T> TypeReference<T>.fromJson(s: String): T = om.readValue(s, this)\ninline fun <reified T> typeRefJson(s: String) = object : TypeReference<T>() {}.fromJson(s)\n\nval omYaml = ObjectMapper(YAMLFactory()).registerModule(KotlinModule()).registerModule(zdtModule())!!\n\nfun <T> Class<T>.fromYaml(s: String) = omYaml.readValue(s, this)!!\nfun <T> TypeReference<T>.fromYaml(s: String): T = omYaml.readValue(s, this)\ninline fun <reified T> typeRefYaml(s: String) = object : TypeReference<T>() {}.fromYaml(s)\n"
  },
  {
    "path": "commons/src/main/kotlin/com/walmartlabs/bigben/utils/_future_extns.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.utils\n\nimport com.google.common.base.Function\nimport com.google.common.base.Throwables.getStackTraceAsString\nimport com.google.common.util.concurrent.*\nimport com.google.common.util.concurrent.MoreExecutors.directExecutor\nimport com.hazelcast.core.ICompletableFuture\nimport com.walmartlabs.bigben.utils.commons.ListenableFutureAdapter\nimport com.walmartlabs.bigben.utils.commons.Props\nimport com.walmartlabs.bigben.utils.commons.TaskExecutor\nimport org.slf4j.LoggerFactory\nimport java.util.UUID.randomUUID\nimport java.util.concurrent.ExecutorService\nimport java.util.concurrent.Future\nimport java.util.concurrent.ScheduledExecutorService\nimport java.util.concurrent.TimeUnit\nimport java.util.concurrent.TimeUnit.SECONDS\n\n/**\n * Created by smalik3 on 2/21/18\n */\nval _l = LoggerFactory.getLogger(\"com.walmartlabs.bigben.utils\")!!\n\nfun onError(t: Throwable?) = _l.error(\"error in processing: ${getStackTraceAsString(t.rootCause()!!)}\", t.rootCause())\n\nfun <T> ListenableFuture<T>.done(onError: (Throwable?) -> Unit = ::onError, onSuccess: (T?) 
-> Unit): ListenableFuture<T> {\n    return also {\n        Futures.addCallback(it, object : FutureCallback<T> {\n            override fun onFailure(t: Throwable?) {\n                onError(t)\n            }\n\n            override fun onSuccess(result: T?) {\n                onSuccess(result)\n            }\n        }, directExecutor())\n    }\n}\n\nfun <T> List<ListenableFuture<T>>.done(onError: (Throwable?) -> Unit = ::onError, onSuccess: (List<T>?) -> Unit): ListenableFuture<List<T>> {\n    return reduce().done(onError, onSuccess)\n}\n\nfun <T> List<ListenableFuture<T>>.reduce(): ListenableFuture<List<T>> {\n    return Futures.allAsList(this)\n}\n\nfun <T, R> ListenableFuture<T>.transform(t: (T?) -> R): ListenableFuture<R> {\n    return Futures.transform(this, Function { t(it) }, directExecutor())\n}\n\nfun <T> ListenableFuture<T>.catching(t: (Throwable?) -> T): ListenableFuture<T> {\n    return Futures.catching(this, Exception::class.java, Function { t(it) }, directExecutor())\n}\n\nfun <T> ListenableFuture<T>.catchingAsync(t: (Throwable?) -> ListenableFuture<T>): ListenableFuture<T> {\n    return Futures.catchingAsync(this, Exception::class.java, AsyncFunction { t(it) }, directExecutor())\n}\n\nfun <T, R> ListenableFuture<T>.transformAsync(t: (T?) 
-> ListenableFuture<R>): ListenableFuture<R> {\n    return Futures.transformAsync(this, AsyncFunction { t(it) }, directExecutor())\n}\n\nfun <T> AsyncCallable<T>.scheduleAsync(delay: Long, units: TimeUnit, scheduledExecutor: ScheduledExecutorService): ListenableFuture<T> {\n    return Futures.scheduleAsync(this, delay, units, scheduledExecutor)\n}\n\nfun <T> AsyncCallable<T>.submitAsync(executorService: ExecutorService): ListenableFuture<T> {\n    return Futures.submitAsync(this, executorService)\n}\n\nprivate val te = TaskExecutor(setOf(Exception::class.java))\n\nfun <T> (() -> ListenableFuture<T>).retriable(taskId: String = randomUUID().toString(),\n                                              maxRetries: Int = Props.int(\"task.executor.max.retries\"),\n                                              delay: Int = Props.int(\"task.executor.delay\"),\n                                              backoffMultiplier: Int = Props.int(\"task.executor.backoff.multiplier\"),\n                                              timeUnit: TimeUnit = SECONDS,\n                                              taskExecutor: TaskExecutor = te): ListenableFuture<T> =\n        taskExecutor.async(taskId, maxRetries, delay, backoffMultiplier, timeUnit, this)\n\n@Suppress(\"UNCHECKED_CAST\")\nfun <T, F : Future<T>> F.listenable(): ListenableFutureAdapter<T> {\n    require(this is ICompletableFuture<*>) { \"future must be instance of ICompletableFuture\" }\n    return ListenableFutureAdapter(this as ICompletableFuture<T>)\n}\n\nfun <T> Future<T>.result(waitTime: Long = Props.long(\"generic.future.max.get.time\"), errorHandler: (Exception) -> T?): T {\n    return try {\n        get(waitTime, SECONDS)\n    } catch (e: Exception) {\n        errorHandler(e) ?: throw e\n    }\n}\n"
  },
  {
    "path": "commons/src/main/kotlin/com/walmartlabs/bigben/utils/commons/ListenableFutureAdapter.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.utils.commons\n\nimport com.google.common.util.concurrent.AbstractFuture\nimport com.hazelcast.core.ExecutionCallback\nimport com.hazelcast.core.ICompletableFuture\nimport java.util.concurrent.ExecutionException\nimport java.util.concurrent.Executor\nimport java.util.concurrent.TimeUnit\nimport java.util.concurrent.TimeoutException\n\n/**\n * Created by smalik3 on 2/23/18\n */\nclass ListenableFutureAdapter<T>(private val delegate: ICompletableFuture<T>) : AbstractFuture<T>() {\n\n    override fun addListener(listener: Runnable, executor: Executor) {\n        super.addListener(listener, executor)\n        delegate.andThen(object : ExecutionCallback<T> {\n            override fun onResponse(response: T) {\n                set(response)\n            }\n\n            override fun onFailure(t: Throwable) {\n                if (t is ExecutionException && t.cause == null) {\n                    t.initCause(RuntimeException(t.message))\n                    setException(RuntimeException(t))\n                } else setException(t)\n            }\n        }, executor)\n    }\n\n    override fun cancel(mayInterruptIfRunning: Boolean) = super.cancel(mayInterruptIfRunning).run { 
delegate.cancel(mayInterruptIfRunning) }\n    override fun isCancelled() = delegate.isCancelled\n    override fun isDone() = delegate.isDone\n\n    @Throws(InterruptedException::class, ExecutionException::class)\n    override fun get(): T? = delegate.get()\n\n    @Throws(InterruptedException::class, ExecutionException::class, TimeoutException::class)\n    override operator fun get(timeout: Long, unit: TimeUnit): T? = delegate.get(timeout, unit)\n}\n"
  },
  {
    "path": "commons/src/main/kotlin/com/walmartlabs/bigben/utils/commons/Props.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.utils.commons\n\nimport com.fasterxml.jackson.core.type.TypeReference\nimport com.google.common.cache.CacheBuilder\nimport com.walmartlabs.bigben.utils.Json\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.omYaml\nimport com.walmartlabs.bigben.utils.yaml\nimport org.apache.commons.text.StrLookup\nimport org.apache.commons.text.StrSubstitutor\nimport java.io.File\nimport java.util.concurrent.atomic.AtomicReference\nimport java.util.function.Supplier\n\n/**\n * Created by smalik3 on 2/21/18\n */\nobject Props : PropsLoader() {\n    fun parse(props: Json) = PropsLoader(props)\n}\n\nopen class PropsLoader(preloaded: Json? 
= null) {\n\n    companion object {\n        private val l = logger<PropsLoader>()\n\n        private val NULL: Any = Any()\n        private val NULL_PRESENT: Any = Any()\n\n        @Suppress(\"UNCHECKED_CAST\")\n        fun flatten(json: Json): Json {\n            return json.entries.map { e ->\n                if (e.value !is Map<*, *>) listOf(e.key to e.value) else {\n                    flatten(e.value as Json).entries.map { \"${e.key}.${it.key}\" to it.value }\n                }\n            }.flatten().associate { it.first to it.second }\n        }\n\n        private fun unflatten(json: Json): Json {\n            val result = mutableMapOf<String, Any>()\n            val multiple =\n                json.entries.filter { it.key.contains(\".\") }.map { it.key.split(\".\")[0] to 1 }.groupBy { it.first }\n                    .mapValues { it.value.size }.filter { it.value > 1 }\n            json.filter { it.key.split(\".\")[0] !in multiple.keys }.run { result.putAll(this) }\n            multiple.forEach { e ->\n                result[e.key.split(\".\")[0]] =\n                        unflatten(json.filterKeys { it.startsWith(e.key + \".\") }.mapKeys { it.key.split(\".\", limit = 2)[1] })\n            }\n            return result\n        }\n\n        fun merge(base: Json, override: Json, onlyUpdates: Boolean = false): Json {\n            val fBase = flatten(base)\n            val fOverrides = flatten(override)\n            return merge0(fBase, fOverrides, onlyUpdates)\n        }\n\n        private fun merge0(base: Json, override: Json, onlyUpdates: Boolean = false): Json {\n            val result = mutableMapOf<String, Any>().apply { putAll(base) }\n            val added = override.keys - base.keys\n            val updates = base.keys.intersect(override.keys)\n            try {\n                if (!onlyUpdates) added.forEach { if (override[it] != null) require(result.putIfAbsent(it, override[it]!!) 
== null) }\n            } catch (e: Exception) {\n                println(e)\n            }\n            updates.forEach {\n                val baseVal = base[it]\n                val overrideVal = override[it]!!\n                when (baseVal) {\n                    is Map<*, *> -> {\n                        require(overrideVal is Map<*, *>) { \"incompatible values for key $it: $baseVal, $overrideVal\" }\n                        @Suppress(\"UNCHECKED_CAST\")\n                        result[it] = merge(baseVal as Json, overrideVal as Json)\n                    }\n                    is Collection<*> -> {\n                        require(overrideVal is Collection<*>) { \"incompatible values for key $it: $baseVal, $overrideVal\" }\n                        result[it] = LinkedHashSet(baseVal) + LinkedHashSet(overrideVal)\n                    }\n                    else -> {\n                        result[it] = overrideVal\n                    }\n                }\n            }\n            return result\n        }\n\n        private fun substitute(json: Json): Json {\n            val s1 = StrSubstitutor(StrLookup.systemPropertiesLookup())\n            val s2 = StrSubstitutor(object : StrLookup<Any>() {\n                override fun lookup(key: String): String? {\n                    return System.getenv(key)\n                }\n            })\n            val s3 = StrSubstitutor(StrLookup.mapLookup(json))\n            return substitute0(substitute0(substitute0(json, s1), s2), s3) as Json\n        }\n\n        private fun substitute0(obj: Any?, substitutor: StrSubstitutor): Any? 
{\n            return when (obj) {\n                null -> null\n                is Map<*, *> -> {\n                    obj.entries.associate { substitute0(it.key, substitutor) to substitute0(it.value, substitutor) }\n                }\n                is Collection<*> -> {\n                    obj.map { substitute0(it, substitutor) }\n                }\n                is String -> {\n                    substitutor.replace(obj)\n                }\n                else -> obj\n            }\n        }\n    }\n\n    private val props = AtomicReference<Json>().apply { preloaded?.let { set(it) } }\n    private val cache = CacheBuilder.newBuilder().build<String, Any>()\n\n    fun load(supplier: Supplier<String>) = load(supplier.get())\n\n    fun load(vararg props: String): PropsLoader {\n        l.info(\"loading props\")\n        props.reversed().map {\n            val x: Json = omYaml.readValue(ResourceLoader.load(it), object : TypeReference<Json>() {}); x to false\n        }.run { this + (System.getenv() to true) + (System.getProperties() as Json to true) }\n            .fold(emptyMap<String, Any>()) { r, e -> merge(r, e.first, e.second) }\n            .run { substitute(this) }.run { unflatten(this) }.let { this.props.set(it) }\n        cache.invalidateAll()\n        l.info(\"loaded props successfully\")\n        if (l.isDebugEnabled) l.debug(\"resolved props:\\n ${this.props.get().yaml()}\")\n        return this\n    }\n\n    fun exists(name: String): Boolean = get(name) != null\n    fun int(name: String, defaultValue: Int = 0) = get(name)?.toString()?.toInt() ?: defaultValue\n    fun long(name: String, defaultValue: Long = 0) = get(name)?.toString()?.toLong() ?: defaultValue\n    fun string(name: String, defaultValue: String = \"\") = get(name)?.toString() ?: defaultValue\n    fun boolean(name: String, defaultValue: Boolean = false) = get(name)?.toString()?.toBoolean() ?: defaultValue\n\n    fun int(name: String) = get(name, true)!!.toString().toInt()\n    fun 
long(name: String) = get(name, true)!!.toString().toLong()\n    fun string(name: String) = get(name, true)!!.toString()\n    fun boolean(name: String) = get(name, true)!!.toString().toBoolean()\n    @Suppress(\"UNCHECKED_CAST\")\n    fun map(name: String) = get(name, true) as Json\n    fun root() = props.get()!!\n\n    @Suppress(\"UNCHECKED_CAST\")\n    fun list(name: String) = get(name, true) as List<Any>\n\n    private fun get(name: String, required: Boolean = false): Any? {\n        val value = cache.get(name) { resolver(name) }\n        return when {\n            value == NULL && required -> throw IllegalArgumentException(\"no property with name: $name\")\n            value == NULL -> null\n            value == NULL_PRESENT && required -> throw IllegalArgumentException(\"property '$name' has a 'null' value\")\n            else -> value\n        }\n    }\n\n    private fun resolver(name: String, p: Json = props.get()): Any {\n        if (p.containsKey(name)) return p[name]?.let { it } ?: NULL_PRESENT\n        else if (name.contains(\".\")) {\n            val parts = name.split(\".\", limit = 2)\n            return if (p.containsKey(parts[0]) && p[parts[0]] is Map<*, *>) {\n                @Suppress(\"UNCHECKED_CAST\")\n                resolver(parts[1], p[parts[0]] as Json)\n            } else NULL\n        }\n        return NULL\n    }\n}\n\nobject ResourceLoader {\n\n    private val l = logger<ResourceLoader>()\n\n    fun load(location: String): String {\n        return when {\n            location.startsWith(\"uri://\") -> {\n                val f = File(location.substring(\"uri://\".length))\n                l.info(\"reading data from the resource: $f\")\n                require(f.exists()) { \"could not resolve $f to a location\" }\n                f.readText()\n            }\n            location.startsWith(\"file://\") -> {\n                val f = location.substring(\"file://\".length)\n                l.info(\"reading data from the resource: $f\")\n     
           val ins = Props::class.java.classLoader.getResourceAsStream(f)\n                require(ins != null) { \"could not resolve $f to a location\" }\n                String(ins.readBytes())\n            }\n            else -> throw IllegalArgumentException(\"unknown resource format: $location\")\n        }\n    }\n}"
  },
  {
    "path": "commons/src/main/kotlin/com/walmartlabs/bigben/utils/commons/TaskExecutor.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.utils.commons\n\nimport com.fasterxml.jackson.databind.util.ClassUtil.getRootCause\nimport com.google.common.util.concurrent.Futures.immediateFailedFuture\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.google.common.util.concurrent.MoreExecutors.listeningDecorator\nimport com.walmartlabs.bigben.utils.catchingAsync\nimport com.walmartlabs.bigben.utils.commons.Props.int\nimport com.walmartlabs.bigben.utils.commons.Props.string\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.transformAsync\nimport java.lang.Runtime.getRuntime\nimport java.util.UUID.randomUUID\nimport java.util.concurrent.*\nimport java.util.concurrent.atomic.AtomicInteger\nimport java.util.function.Supplier\n\n/**\n * Created by smalik3 on 2/22/18\n */\nclass TaskExecutor(private val logErrorStackDuringIntermediateRetries: Boolean = false, private val isRetriable: (t: Throwable) -> Boolean) {\n    constructor(retriableExceptions: Set<Class<*>>, logErrorStackDuringIntermediateRetries: Boolean = false) : this(logErrorStackDuringIntermediateRetries,\n            fun(t: Throwable?): Boolean { return isRetriable(t, retriableExceptions) })\n\n    companion object {\n     
   private val l = logger<TaskExecutor>()\n        private val serial = AtomicInteger()\n        private val RETRY_POOL = listeningDecorator(ScheduledThreadPoolExecutor(int(\"task.executor.retry.thread.count\", getRuntime().availableProcessors()),\n                ThreadFactory { r -> Thread(r, \"task-executor-retry-worker#\" + serial.getAndIncrement()) }, ThreadPoolExecutor.CallerRunsPolicy()))\n\n        fun isRetriable(cause: Throwable?, retriableExceptions: Set<Class<*>>): Boolean {\n            return cause?.let { retriableExceptions.find { t -> t.isAssignableFrom(cause::class.java) } != null }\n                    ?: false\n        }\n    }\n\n    fun <R> async(taskId: String = randomUUID().toString(), maxRetries: Int = int(\"task.executor.max.retries\"), delay: Int = int(\"task.executor.delay\"),\n                  backoffMultiplier: Int = int(\"task.executor.backoff.multiplier\"), timeUnit: TimeUnit = TimeUnit.valueOf(string(\"task.executor.retry.time.units\")),\n                  task: () -> ListenableFuture<R>): ListenableFuture<R> {\n        return async(taskId, maxRetries, delay, backoffMultiplier, timeUnit, Supplier { Callable { task() } })\n    }\n\n    fun <R> async(taskId: String = randomUUID().toString(), maxRetries: Int = int(\"task.executor.max.retries\"), delay: Int = int(\"task.executor.delay\"),\n                  backoffMultiplier: Int = int(\"task.executor.backoff.multiplier\"),\n                  timeUnit: TimeUnit = TimeUnit.valueOf(string(\"task.executor.retry.time.units\")), supplier: Supplier<Callable<ListenableFuture<R>>>): ListenableFuture<R> {\n        return async(taskId, 0, maxRetries, delay, backoffMultiplier, timeUnit, supplier)\n    }\n\n    private fun <R> async(taskId: String, retryCount: Int, maxRetries: Int,\n                          delay: Int, backoffMultiplier: Int, timeUnit: TimeUnit, task: Supplier<Callable<ListenableFuture<R>>>): ListenableFuture<R> {\n        return try {\n            task.get().call().catchingAsync 
{ mayBeRetry(task, taskId, retryCount, maxRetries, delay, backoffMultiplier, timeUnit, it!!) }\n        } catch (t: Throwable) {\n            mayBeRetry(task, taskId, retryCount, maxRetries, delay, backoffMultiplier, timeUnit, t)\n        }\n    }\n\n    private fun <R> mayBeRetry(task: Supplier<Callable<ListenableFuture<R>>>, taskId: String, retryCount: Int, maxRetries: Int,\n                               delay: Int, backoffMultiplier: Int, timeUnit: TimeUnit, t: Throwable): ListenableFuture<R> {\n        val cause = getRootCause(t)\n        return if (isRetriable(cause)) {\n            if (retryCount < maxRetries) {\n                if (l.isWarnEnabled) {\n                    if (logErrorStackDuringIntermediateRetries) l.warn(\"operation failed, taskId='{}', retrying after {} {}, retry={}, maxRetry={}, exception='{}'\",\n                            taskId, delay, timeUnit, retryCount, maxRetries, if (cause.message == null) cause::class.java.name else cause.message, cause)\n                    else l.warn(\"operation failed, taskId='{}', retrying after {} {}, retry={}, maxRetry={}, exception='{}'\",\n                            taskId, delay, timeUnit, retryCount, maxRetries, if (cause.message == null) cause::class.java.name else cause.message)\n                }\n                RETRY_POOL.schedule(Callable { async(taskId, retryCount + 1, maxRetries, backoffMultiplier * delay, backoffMultiplier, timeUnit, task) }, delay.toLong(), timeUnit).transformAsync { it -> it!! 
}\n            } else {\n                l.error(\"operation failed, taskId='{}', after {} retries, will not be retried anymore, exception='{}'\",\n                        taskId, maxRetries, if (cause.message == null) cause::class.java.name else cause.message, cause)\n                immediateFailedFuture<R>(cause)\n            }\n        } else {\n            l.error(\"operation failed, taskId='{}', unexpected exception\", taskId, cause)\n            immediateFailedFuture<R>(cause)\n        }\n    }\n}\n"
  },
  {
    "path": "commons/src/main/kotlin/com/walmartlabs/bigben/utils/commons/modules.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.utils.commons\n\nimport com.walmartlabs.bigben.utils.Json\nimport com.walmartlabs.bigben.utils.logger\nimport java.util.concurrent.ConcurrentHashMap\n\n/**\n * Created by smalik3 on 9/17/18\n */\ninterface Module {\n    fun init(registry: ModuleRegistry)\n}\n\nopen class NoOpModule : Module {\n    override fun init(registry: ModuleRegistry) {\n    }\n}\n\nclass ModuleRegistry {\n\n    private val l = logger<ModuleRegistry>()\n\n    val cache = ConcurrentHashMap<Class<*>, Any>()\n\n    @Suppress(\"UNCHECKED_CAST\")\n    inline fun <reified T> module() = cache.computeIfAbsent(T::class.java) {\n        cache.values.firstOrNull { T::class.java.isAssignableFrom(it::class.java) }\n                ?: throw IllegalArgumentException(\"no module found with type: ${T::class.java}\")\n    } as T\n\n    @Suppress(\"UNCHECKED_CAST\")\n    inline fun <reified T : Any> register(t: T) = cache.put(t::class.java, t as Any)\n\n    fun loadModules(props: PropsLoader) {\n        l.info(\"loading modules\")\n        props.list(\"modules\").forEach {\n            @Suppress(\"UNCHECKED_CAST\")\n            val p = Props.parse(it as Json)\n            val enabled = p.boolean(\"enabled\", true)\n            if 
(!enabled) {\n                l.info(\"skipping disabled module ${p.string(\"name\")}\")\n            } else {\n                l.info(\"initializing module: ${p.string(\"name\")}\")\n                createModule(it).also {\n                    it.init(this)\n                    l.info(\"registering module: ${p.string(\"name\")}\")\n                    register(it)\n                }\n            }\n        }\n    }\n\n    private fun createModule(m: Json): Module {\n        return try {\n            (if (m.containsKey(\"class\")) {\n                (Class.forName(m[\"class\"].toString()).newInstance() as Module)\n            } else Class.forName(m[\"object\"].toString()).getDeclaredField(\"INSTANCE\").apply { isAccessible = true }.get(null) as Module)\n        } catch (e: Exception) {\n            e.printStackTrace()\n            throw ExceptionInInitializerError(e)\n        }\n    }\n}"
  },
  {
    "path": "commons/src/main/kotlin/com/walmartlabs/bigben/utils/hz/ClusterSingleton.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.utils.hz\n\nimport com.google.common.collect.Sets.newConcurrentHashSet\nimport com.hazelcast.core.HazelcastInstanceNotActiveException\nimport com.hazelcast.core.LifecycleEvent.LifecycleState.SHUTTING_DOWN\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.rootCause\nimport java.lang.Thread.currentThread\nimport java.util.concurrent.ConcurrentHashMap\nimport java.util.concurrent.Executors.newSingleThreadExecutor\nimport java.util.concurrent.atomic.AtomicInteger\nimport javax.print.attribute.standard.PrinterStateReason.SHUTDOWN\n\n/**\n * Created by smalik3 on 3/1/18\n */\nclass ClusterSingleton(private val service: Service, private val hz: Hz) {\n\n    private val listenerId = ConcurrentHashMap<String, String>()\n    private val index = AtomicInteger()\n\n    companion object {\n        val ACTIVE_SERVICES: MutableSet<String> = newConcurrentHashSet<String>()!!\n        private val l = logger<ClusterSingleton>()\n        private val nonRetriables = setOf(HazelcastInstanceNotActiveException::class.java)\n    }\n\n    private val executor = newSingleThreadExecutor()\n\n    init {\n        executor.submit(task())\n    }\n\n    private fun task(): Runnable = 
Runnable {\n        val lockName = \"${service.name}_lock\"\n        try {\n            currentThread().name = \"${service.name}_service_thread\"\n            val clusterSingletonLock = hz.hz.getLock(lockName)\n            clusterSingletonLock.lock()\n            l.info(\n                \"cluster singleton elected, '${hz.hz.cluster.localMember.address}/${currentThread().name}' is the new owner for: ${service.name}\"\n            )\n            listenerId.computeIfAbsent(\"listenerId\") {\n                hz.hz.apply { l.info(\"Adding the shutdown hook for cluster singleton: ${service.name}\") }\n                    .lifecycleService.addLifecycleListener { event ->\n                    if (event.state == SHUTDOWN || event.state == SHUTTING_DOWN) {\n                        if (l.isInfoEnabled) l.info(\"node is shutting down, destroying the service: {}\", service.name)\n                        try {\n                            service.destroy()\n                            ACTIVE_SERVICES.remove(service.name)\n                        } catch (e: Exception) {\n                            l.error(\"error in destroying the service: ${service.name}\", e.rootCause())\n                        }\n                    }\n                }.also {\n                    l.info(\"initing the cluster singleton service: ${service.name}\")\n                    service.init()\n                    ACTIVE_SERVICES.add(service.name)\n                }\n            }\n            l.info(\"executing the cluster singleton service: ${service.name}\")\n            service.execute()\n        } catch (e: Exception) {\n            if (e.rootCause()!!::class.java in nonRetriables)\n                l.error(\"error in running the service: ${service.name}\", e.rootCause())\n            else {\n                l.error(\"error in running the service: ${service.name}, retrying...\", e.rootCause())\n                try {\n                    hz.hz.getLock(lockName).unlock()\n                } catch (e: Exception) {\n                    l.error(\"error in unlocking cluster singleton\", e.rootCause())\n                } finally {\n                    l.info(\"resubmitting ownership claim task: attempt: ${index.incrementAndGet()}\")\n                    executor.submit(task())\n                }\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "commons/src/main/kotlin/com/walmartlabs/bigben/utils/hz/Hz.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.utils.hz\n\nimport com.hazelcast.config.XmlConfigBuilder\nimport com.hazelcast.core.Hazelcast.newHazelcastInstance\nimport com.hazelcast.core.HazelcastInstance\nimport com.walmartlabs.bigben.utils.Json\nimport com.walmartlabs.bigben.utils.commons.Module\nimport com.walmartlabs.bigben.utils.commons.ModuleRegistry\nimport com.walmartlabs.bigben.utils.commons.Props.map\nimport com.walmartlabs.bigben.utils.commons.Props.string\nimport com.walmartlabs.bigben.utils.commons.PropsLoader\nimport com.walmartlabs.bigben.utils.commons.ResourceLoader\nimport com.walmartlabs.bigben.utils.json\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.typeRefJson\nimport org.apache.commons.text.StrLookup\nimport org.apache.commons.text.StrSubstitutor\nimport java.io.ByteArrayInputStream\n\n/**\n * Created by smalik3 on 2/23/18\n */\n\nclass Hz : Module {\n\n    companion object {\n        private val l = logger<Hz>()\n    }\n\n    val hz: HazelcastInstance\n\n    init {\n        val config = typeRefJson<Json>(PropsLoader.flatten(map(\"hz\"))!!.json()).let { map ->\n            if (l.isDebugEnabled) l.debug(\"using the hazelcast config from: 
${Hz::class.java.getResource(string(\"hz.template\"))}\")\n            val template = ResourceLoader.load(string(\"hz.template\"))\n            StrSubstitutor(object : StrLookup<Any>() {\n                override fun lookup(key: String): String? {\n                    return if (map.containsKey(key)) map[key]!!.toString()\n                    else {\n                        var current: Any? = map\n                        for (it in key.split(\".\")) {\n                            if (current is Map<*, *>)\n                                current = current[it]\n                            else break\n                        }\n                        current?.toString()\n                    }\n                }\n            }).apply { setValueDelimiter(' ') }.replace(template)\n        }\n        hz = newHazelcastInstance(XmlConfigBuilder(ByteArrayInputStream(config.toByteArray())).build())\n        if (l.isDebugEnabled) l.debug(\"hazelcast config file: {}\", config)\n    }\n\n    override fun init(registry: ModuleRegistry) {\n    }\n}\n"
  },
  {
    "path": "commons/src/main/kotlin/com/walmartlabs/bigben/utils/hz/Service.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.utils.hz\n\n/**\n * Created by smalik3 on 2/23/18\n */\ninterface Service {\n    val name: String\n    fun init()\n    fun execute()\n    fun destroy()\n}\n"
  },
  {
    "path": "commons/src/test/kotlin/PropsTests.kt",
    "content": "/*-\n * #%L\n * BigBen:commons\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\nimport com.walmartlabs.bigben.utils.commons.Props\nimport com.walmartlabs.bigben.utils.commons.Props.boolean\nimport com.walmartlabs.bigben.utils.commons.Props.exists\nimport com.walmartlabs.bigben.utils.commons.Props.int\nimport com.walmartlabs.bigben.utils.commons.Props.list\nimport com.walmartlabs.bigben.utils.commons.Props.long\nimport com.walmartlabs.bigben.utils.commons.Props.map\nimport com.walmartlabs.bigben.utils.commons.Props.string\nimport com.walmartlabs.bigben.utils.commons.PropsLoader\nimport com.walmartlabs.bigben.utils.commons.ResourceLoader\nimport com.walmartlabs.bigben.utils.typeRefYaml\nimport org.testng.annotations.Test\nimport java.util.function.Supplier\nimport kotlin.test.assertEquals\nimport kotlin.test.assertTrue\n\n/**\n * Created by smalik3 on 7/6/18\n */\nclass PropsTests {\n\n    @Test(priority = 1)\n    fun `props test - file`() {\n        Props.load(\"file://props.yaml\")\n        asserts()\n    }\n\n    @Test(priority = 2)\n    fun `prop test - supplier`() {\n        Props.load(Supplier { \"file://props.yaml\" })\n        asserts()\n    }\n\n    @Test\n    fun `test overrides`() {\n        val props = PropsLoader().load(\"file://overrides.yaml\", \"file://props.yaml\")\n        
assertTrue(props.exists(\"a\"))\n        assertTrue(props.exists(\"a.b\"))\n        assertTrue(props.exists(\"a.c.d\"))\n        assertEquals(props.string(\"a.c.d\"), \"y1\") // override\n        assertEquals(props.string(\"a.b\"), \"x\")\n        assertEquals(props.int(\"a.e\"), 12)\n        assertEquals(props.boolean(\"a.f\"), true)\n        assertEquals(props.list(\"a.g\"), listOf(1, 2, 3)) // override => list append\n        assertEquals(props.long(\"a.i\", 10), 10)\n        val actual = props.map(\"a\")\n        val expected = mapOf(\n            \"b\" to \"x\", \"c.d\" to \"y1\", \"e\" to 12, \"f\" to true,\n            \"g\" to listOf(1, 2, 3), \"h\" to mapOf(\"h1\" to \"abc\", \"h2\" to \"H2\", \"h3\" to System.getProperty(\"user.home\")),\n            \"j\" to 1\n        )\n        println(expected)\n        println(actual)\n        assertEquals(expected, actual)\n        val actualFlattened = PropsLoader.flatten(props.map(\"a\"))\n        val expectedFlattened = mapOf(\n            \"b\" to \"x\", \"c.d\" to \"y1\", \"e\" to 12, \"f\" to true,\n            \"g\" to listOf(1, 2, 3), \"j\" to 1, \"h.h1\" to \"abc\", \"h.h2\" to \"H2\", \"h.h3\" to System.getProperty(\"user.home\")\n        )\n        println(actualFlattened)\n        println(expectedFlattened)\n        assertEquals(expectedFlattened, actualFlattened)\n    }\n\n    /*@Test\n    fun `test flatten and unflatten`() {\n        //val merged = Props.load(\"file://sub1-overrides.yaml\", \"file://sub1.yaml\").root()\n        val expected = mapOf(\n            \"a\" to \"b\", \"c\" to\n                    listOf(\n                        \"4\", \"5\", mapOf(\"i1\" to \"I1\"), mapOf(\"d1\" to \"D1\"), mapOf(\"G\" to \"H1\"),\n                        mapOf(\"g\" to \"h\"), mapOf(\n                            \"d\" to mapOf(\n                                \"d11\" to System.getProperty(\"java.home1\", \"acc\"),\n                                \"d22\" to \"D22\", \"e\" to \"E22\", \"l\" to\n            
                            listOf(\n                                            mapOf(\"a\" to System.getProperty(\"java.io.tmpdir1\", \"Aaa\")),\n                                            mapOf(\"a1\" to \"b1\"), mapOf(\"c\" to \"d\"), mapOf(\"e\" to mapOf(\"f\" to \"F1\"))\n                                        )\n                            )\n                        ),\n                        mapOf(\"i\" to mapOf(\"j\" to \"k11\", \"l\" to \"m\", \"j1\" to \"J1\"))\n                    )\n        )\n        val flattened = PropsLoader.flatten(expected)\n        val unflattened = PropsLoader.unflatten(flattened)\n        assertEquals(expected, unflattened)\n    }\n\n    @Test\n    fun `test list substitutions`() {\n        val comparator = Comparator<Any> { o1, o2 -> o1.toString().compareTo(o2.toString()) }\n        val merged = Props.load(\"file://sub1-overrides.yaml\", \"file://sub1.yaml\").root()\n        val expected = mapOf(\n            \"a\" to \"b\", \"c\" to\n                    sortedSetOf(\n                        comparator,\n                        \"4\", \"5\", mapOf(\"i1\" to \"I1\"), mapOf(\"d1\" to \"D1\"), mapOf(\"G\" to \"H1\"),\n                        mapOf(\"g\" to \"h\"), mapOf(\n                            \"d\" to mapOf(\n                                \"d11\" to System.getProperty(\"java.home1\", \"acc\"),\n                                \"d22\" to \"D22\", \"e\" to \"E22\", \"l\" to\n                                        sortedSetOf(\n                                            comparator,\n                                            mapOf(\"a\" to System.getProperty(\"java.io.tmpdir1\", \"Aaa\")),\n                                            mapOf(\"a1\" to \"b1\"), mapOf(\"c\" to \"d\"), mapOf(\"e\" to mapOf(\"f\" to \"F1\"))\n                                        )\n                            )\n                        ),\n                        mapOf(\"i\" to mapOf(\"j\" to \"k11\", \"l\" to \"m\", \"j1\" to 
\"J1\"))\n                    )\n        )\n        val flattened = PropsLoader.flatten(merged) as Json\n        val unflattened = PropsLoader.unflatten(flattened)\n        println(\"merged: $merged\")\n        println(\"flatte: $flattened\")\n        println(\"unflat: $unflattened\")\n        println(\"expect: $expected\")\n        TODO(\"complete the asserts\")\n    }*/\n\n    @Test\n    fun `test substitutions in list`() {\n        val s = ResourceLoader.load(\"file://a.yaml\")\n        val yaml = typeRefYaml<Map<String, Any>>(s)\n\n\n        val merged = Props.load(\"file://b.yaml\", \"file://a.yaml\").root()\n        println(merged)\n        //val unflatten = PropsLoader.unflatten(merged.root())\n        //println(unflatten.yaml())\n    }\n\n    private fun asserts() {\n        assertTrue(exists(\"a\"))\n        assertTrue(exists(\"a.b\"))\n        assertTrue(exists(\"a.c.d\"))\n        assertEquals(string(\"a.c.d\"), \"y\")\n        assertEquals(string(\"a.b\"), \"x\")\n        assertEquals(int(\"a.e\"), 12)\n        assertEquals(boolean(\"a.f\"), true)\n        assertEquals(list(\"a.g\"), listOf(1, 2))\n        assertEquals(long(\"a.i\", 10), 10)\n        assertEquals(\n            PropsLoader.flatten(map(\"a\")), mapOf(\n                \"b\" to \"x\", \"c.d\" to \"y\", \"e\" to 12, \"f\" to true,\n                \"g\" to listOf(1, 2), \"h.h1\" to \"H1\", \"h.h2\" to \"H2\"\n            )\n        )\n    }\n}\n"
  },
  {
    "path": "commons/src/test/resources/a.yaml",
    "content": "# top level modules\nmodules:\n  - name: domain\n    class: com.walmartlabs.bigben.providers.domain.cassandra.CassandraModule\n  - name: processors\n    object: com.walmartlabs.bigben.processors.ProcessorRegistry\n  - name: hz\n    class: com.walmartlabs.bigben.utils.hz.Hz\n  - name: scheduler\n    object: com.walmartlabs.bigben.SchedulerModule\n  - name: events\n    object: com.walmartlabs.bigben.EventModule\n  - name: messaging\n    object: com.walmartlabs.bigben.kafka.KafkaModule\n    enabled: false\n  - name: cron\n    object: com.walmartlabs.bigben.cron.CronRunner\n    enabled: false\n\n# hazelcast properties\nhz:\n  template: file://hz.template.xml\n  group:\n    name: bigben-dev\n    password: bigben-dev\n  network:\n    autoIncrementPort: true\n    members: 127.0.0.1\n    port: 5701\n  map:\n    store:\n      writeDelay: 30\n\n# message related properties\nmessaging.producer.factory.class: com.walmartlabs.bigben.kafka.KafkaMessageProducerFactory\n\n# cassandra related properties\ncassandra:\n  keyspace: bigben\n  cluster:\n    contactPoints: 127.0.0.1\n    clusterName: bigben-cluster\n    port: 9042\n    localDataCenter: null\n    coreConnectionsPerHost: 8\n    maxHostsPerConnection: 32768\n    keepTCPConnectionAlive: true\n    connectionTimeOut: 5000\n    readTimeout: 12000\n    reconnectPeriod: 5\n    username: null\n    password: null\n    downgradingConsistency: false\n    writeConsistency: \"LOCAL_QUORUM\"\n    readConsistency: \"LOCAL_QUORUM\"\n\n# kafka consumer properties\nkafka:\n  consumers:\n    - num.consumers: 8\n      processor.impl.class: com.walmartlabs.bigben.kafka.ProcessorImpl\n      topics: ${inbound.topics.1}\n      max.poll.wait.time: 10000\n      message.retry.max.count: 10\n      config:\n        key.deserializer: org.apache.kafka.common.serialization.StringDeserializer\n        value.deserializer: org.apache.kafka.common.serialization.StringDeserializer\n        bootstrap.servers: ${inbound.bootstrap.servers.1}\n     
   #fetch.min.bytes: 1\n        group.id: bigben-inbound\n        #heartbeat.interval.ms: 3000\n        session.timeout.ms: 30000\n        auto.offset.reset: latest\n        fetch.max.bytes: 324000\n        max.poll.interval.ms: 30000\n        max.poll.records: 100\n        receive.buffer.bytes: 65536\n        request.timeout.ms: 60000\n        #send.buffer.bytes: 131072\n        enable.auto.commit: false\n  producer:\n    config: # this is default kafka producer config, these values will be used if not supplied during the tenant registration\n      key.serializer: org.apache.kafka.common.serialization.StringSerializer\n      value.serializer: org.apache.kafka.common.serialization.StringSerializer\n      acks: \"1\"\n      buffer.memory: 32400\n      retries: 3\n\n# system properties\ntask:\n  executor:\n    #retry.thread.count: 8\n    retry.time.units: SECONDS\n    delay: 1\n    max.retries: 3\n    backoff.multiplier: 2\n\napp.server.port: 8080\n\ngeneric.future.max.get.time: 60\n\nevents:\n  scheduler.enabled: true\n  schedule.scan.interval.minutes: 1\n  num.shard.submitters: 8\n  receiver:\n    shard.size: 1000\n    lapse.offset.minutes: 0\n    delete:\n      max.retries: 3\n      initial.delay: 1\n      backoff.multiplier: 1\n  submit:\n    initial.delay: 1\n    backoff.multiplier: 1\n    max.retries: 3\n  processor:\n    max.retries: 3\n    initial.delay: 1\n    backoff.multiplier: 2\n    eager.loading: true\n  tasks:\n    max.events.in.memory: 100000\n    scheduler.worker.threads: 8\n\n# bucket manager / loader related properties\nbuckets:\n  backlog.check.limit: 300\n  background:\n    load.fetch.size: 100\n    load.wait.interval.seconds: 15\n\ncron:\n  runner:\n    core.pool.size: 8\n  load:\n    max.retries: 10\n    delay: 1\n    backoff.multiplier: 1\n    time.units: \"SECONDS\""
  },
  {
    "path": "commons/src/test/resources/b.yaml",
    "content": "inbound.topics.1: my_topic\ninbound.bootstrap.servers.1: my_servers"
  },
  {
    "path": "commons/src/test/resources/log4j.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!--\n  #%L\n  BigBen:commons\n  =======================================\n  Copyright (C) 2016 - 2018 Walmart Inc.\n  =======================================\n  Licensed under the Apache License, Version 2.0 (the \"License\");\n  you may not use this file except in compliance with the License.\n  You may obtain a copy of the License at\n       http://www.apache.org/licenses/LICENSE-2.0\n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n  #L%\n  -->\n\n<!DOCTYPE log4j:configuration SYSTEM \"log4j.dtd\">\n<log4j:configuration debug=\"true\"\n                     xmlns:log4j='http://jakarta.apache.org/log4j/'\n                     xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n                     xsi:schemaLocation=\"http://jakarta.apache.org/log4j/ \">\n\n    <appender name=\"console\" class=\"org.apache.log4j.ConsoleAppender\">\n        <layout class=\"org.apache.log4j.PatternLayout\">\n            <param name=\"ConversionPattern\"\n                   value=\"%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%t] %c{1}:%L - %m%n\"/>\n        </layout>\n    </appender>\n\n    <logger name=\"com.walmartlabs.bigben\" additivity=\"false\">\n        <level value=\"WARN\"/>\n        <appender-ref ref=\"console\"/>\n    </logger>\n\n    <logger name=\"com.walmartlabs.bigben.utils.commons.PropsLoader\" additivity=\"false\">\n        <level value=\"DEBUG\"/>\n        <appender-ref ref=\"console\"/>\n    </logger>\n\n</log4j:configuration>\n"
  },
  {
    "path": "commons/src/test/resources/overrides.yaml",
    "content": "---\na:\n  c.d: y1\n  g:\n    - 1\n    - 3\n  h:\n    h1: ${user.homE:-abc}\n    h3: ${user.home:-abc}\n  j: 1"
  },
  {
    "path": "commons/src/test/resources/props.yaml",
    "content": "---\na:\n  b: x\n  c.d: y\n  e: 12\n  f: true\n  g:\n    - 1\n    - 2\n  h:\n    h1: H1\n    h2: H2"
  },
  {
    "path": "commons/src/test/resources/sub1-overrides.yaml",
    "content": "a: b\nc:\n  - d:\n      d11: ${java.home1:-acc}\n      d22: D22\n      e: E22\n      l:\n        - a: ${java.io.tmpdir1:-Aaa}\n        - c: d\n        - e:\n            f: F1\n    d1: D1\n    G: H1\n  - i:\n      j: k11\n      j1: J1\n  - i1: I1\n  - 5\n\n"
  },
  {
    "path": "commons/src/test/resources/sub1.yaml",
    "content": "a: b\nc:\n  - d:\n      e: E1\n      l:\n        - a: b\n        - a1: b1\n  - g: h\n  - G: H\n  - i:\n      j: k\n      l: m\n  - 4\n\n"
  },
  {
    "path": "cron/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n    <parent>\n        <artifactId>bigben</artifactId>\n        <groupId>com.walmartlabs.bigben</groupId>\n        <version>1.0.7-SNAPSHOT</version>\n    </parent>\n\n    <artifactId>bigben-cron</artifactId>\n    <packaging>takari-jar</packaging>\n    <name>Bigben:cron</name>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.walmartlabs.bigben</groupId>\n            <artifactId>bigben-lib</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.cronutils</groupId>\n            <artifactId>cron-utils</artifactId>\n            <version>7.0.2</version>\n        </dependency>\n        <dependency>\n            <groupId>com.datastax.cassandra</groupId>\n            <artifactId>cassandra-driver-extras</artifactId>\n            <version>3.3.0</version>\n        </dependency>\n    </dependencies>\n\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.jetbrains.kotlin</groupId>\n                <artifactId>kotlin-maven-plugin</artifactId>\n            </plugin>\n        </plugins>\n    </build>\n\n</project>"
  },
  {
    "path": "cron/src/main/kotlin/com/walmartlabs/bigben/cron/cron-hz.kt",
    "content": "/*-\n * #%L\n * Bigben:cron\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.cron\n\nimport com.hazelcast.core.MapStore\nimport com.hazelcast.nio.ObjectDataInput\nimport com.hazelcast.nio.ObjectDataOutput\nimport com.hazelcast.nio.serialization.DataSerializable\nimport com.walmartlabs.bigben.entities.KV\nimport com.walmartlabs.bigben.extns.kvs\nimport com.walmartlabs.bigben.extns.save\nimport com.walmartlabs.bigben.utils.*\nimport com.walmartlabs.bigben.utils.commons.Props.int\n\n/**\n * Created by smalik3 on 7/3/18\n */\ndata class Crons @JvmOverloads constructor(var crons: MutableMap<String, Cron> = HashMap()) : DataSerializable {\n    override fun writeData(out: ObjectDataOutput) = out.run { writeInt(crons.size); crons.forEach { writeUTF(it.value.json()) } }\n    override fun readData(ins: ObjectDataInput) = ins.run { (1..readInt()).forEach { Cron::class.java.fromJson(readUTF()).apply { crons[cronId()] = this } } }\n}\n\nclass CronMapStore : MapStore<Int, Crons> {\n\n    private val l = logger<CronMapStore>()\n\n    override fun storeAll(map: Map<Int, Crons>) {\n        { map.entries.map { e -> save<KV> { it.key = e.key.toString(); it.column = \"\"; it.value = e.value.yaml() } }.reduce() }\n                .retriable(\"cron-map-store:store-all\").result { 
l.error(\"error in storing / updating crons for keys: ${map.keys}\", it.rootCause()!!); throw it.rootCause()!! }\n    }\n\n    override fun store(key: Int, value: Crons) = storeAll(mapOf(key to value))\n\n    override fun loadAllKeys(): Iterable<Int> = (1..int(\"cron.partitions.count\", 271)).toList()\n\n    override fun loadAll(keys: Collection<Int>): Map<Int, Crons> {\n        if (l.isInfoEnabled) l.info(\"bulk-loading cron keys: $keys, thread: ${Thread.currentThread().name}\")\n        return { keys.map { k -> kvs { it.key = k.toString(); it.column = \"\" }.transform { k to it!! }.catching { println(it!!.stackTrace); 0 to emptyList() } }.reduce() }\n                .retriable(\"cron-map-store:load-all\")\n                .result { l.error(\"error in loading crons for keys: $keys\", it.rootCause()); throw it.rootCause()!! }\n                .associate {\n                    if (it.second.isEmpty()) it.first to Crons()\n                    else it.second[0].key!!.toInt() to typeRefYaml(it.second[0].value!!)\n                }.apply { if (l.isInfoEnabled) this.filter { it.value.crons.isNotEmpty() }.apply { if (this.isNotEmpty()) l.info(\"crons loaded: $this}\") } }\n    }\n\n    override fun deleteAll(keys: Collection<Int>) = throw UnsupportedOperationException(\"this must never have happened, keys: $keys\")\n\n    override fun load(key: Int) = loadAll(listOf(key))[key]\n\n    override fun delete(key: Int) = throw UnsupportedOperationException(\"this must never have happened, key: $key\")\n}\n"
  },
  {
    "path": "cron/src/main/kotlin/com/walmartlabs/bigben/cron/cron-processors.kt",
    "content": "/*-\n * #%L\n * Bigben:cron\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.cron\n\nimport com.cronutils.model.CronType\nimport com.cronutils.model.definition.CronDefinitionBuilder\nimport com.cronutils.model.time.ExecutionTime\nimport com.cronutils.parser.CronParser\nimport com.hazelcast.map.AbstractEntryProcessor\nimport com.hazelcast.nio.ObjectDataInput\nimport com.hazelcast.nio.ObjectDataOutput\nimport com.hazelcast.nio.serialization.DataSerializable\nimport com.walmartlabs.bigben.extns.utc\nimport com.walmartlabs.bigben.utils.fromJson\nimport com.walmartlabs.bigben.utils.json\nimport com.walmartlabs.bigben.utils.typeRefJson\nimport java.io.Serializable\nimport java.time.ZoneId\nimport java.time.ZonedDateTime\n\n/**\n * Created by smalik3 on 7/6/18\n */\nabstract class DataSerializableEntryProcessor<K, T>(protected var value: String? = null, applyOnBackup: Boolean) : AbstractEntryProcessor<K, T>(applyOnBackup), DataSerializable {\n    override fun writeData(out: ObjectDataOutput) = out.run { writeUTF(value) }\n    override fun readData(`in`: ObjectDataInput) = `in`.run { value = readUTF() }\n}\n\nclass CronDeleteEntryProcessor(cronId: String? 
= null) : DataSerializableEntryProcessor<Int, Crons>(cronId, true), Serializable {\n    override fun process(entry: MutableMap.MutableEntry<Int, Crons?>): Any? {\n        return entry.setValue(entry.value.apply { this!!.crons.remove(value) }).let { null }\n    }\n}\n\nclass CronEntryProcessor(c: String? = null) : DataSerializableEntryProcessor<Int, Crons>(c, true) {\n    override fun process(entry: MutableMap.MutableEntry<Int, Crons?>): Any? {\n        val cron = Cron::class.java.fromJson(value!!)\n        return entry.setValue(entry.value.apply { CronRunner.crons.values.forEach { this!!.crons[cron.cronId()] = cron } }).let { null }\n    }\n}\n\nclass CronMatchExecutionTimeProcessor(millis: Long? = null) : DataSerializableEntryProcessor<Int, Crons>(millis?.toString(), true) {\n    override fun process(entry: MutableMap.MutableEntry<Int, Crons>): List<String> {\n        val zdt = utc(value!!.toLong())\n        return ArrayList(entry.value.crons.filter { it.value.executionTime().isMatch(zdt) }.values.map { it.json() })\n    }\n}\n\nclass CronUpdateExecutionTimeEntryProcessor(cronId: String? = null, lastExecution: String? = null) : DataSerializableEntryProcessor<Int, Crons>((cronId to lastExecution).json(), true) {\n    override fun process(entry: MutableMap.MutableEntry<Int, Crons?>): Any? 
{\n        val (cronId, lastExecution) = typeRefJson<Pair<String, String>>(value!!)\n        return entry.setValue(entry.value.apply { this!!.crons[cronId]?.let { it.lastExecutionTime = ZonedDateTime.parse(lastExecution) } }).let { null }\n    }\n}\n\nfun main(args: Array<String>) {\n    val c = CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.UNIX)).parse(\"* * * * *\")\n    val et = ExecutionTime.forCron(c)\n    var zdt = ZonedDateTime.now(ZoneId.of(\"UTC\"))\n    var z = zdt\n    println(zdt)\n    (1..10).forEach {\n        val match = et.isMatch(z)\n        z = z.plusSeconds(1)\n        zdt = et.nextExecution(zdt).get()\n        println(\"match = $match, zdt = $zdt, z = $z\")\n    }\n}\n"
  },
  {
    "path": "cron/src/main/kotlin/com/walmartlabs/bigben/cron/cron.kt",
    "content": "/*-\n * #%L\n * Bigben:cron\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.cron\n\nimport com.cronutils.descriptor.CronDescriptor\nimport com.cronutils.model.CronType\nimport com.cronutils.model.definition.CronDefinitionBuilder\nimport com.cronutils.model.time.ExecutionTime\nimport com.cronutils.parser.CronParser\nimport com.fasterxml.jackson.annotation.JsonInclude\nimport com.fasterxml.jackson.annotation.JsonInclude.Include.NON_EMPTY\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.cron.CronRunner.crons\nimport com.walmartlabs.bigben.entities.Event\nimport com.walmartlabs.bigben.entities.EventResponse\nimport com.walmartlabs.bigben.entities.EventStatus\nimport com.walmartlabs.bigben.entities.KV\nimport com.walmartlabs.bigben.extns.*\nimport com.walmartlabs.bigben.processors.ProcessorRegistry\nimport com.walmartlabs.bigben.utils.*\nimport com.walmartlabs.bigben.utils.commons.Module\nimport com.walmartlabs.bigben.utils.commons.ModuleRegistry\nimport com.walmartlabs.bigben.utils.commons.Props.int\nimport com.walmartlabs.bigben.utils.hz.Hz\nimport java.time.ZonedDateTime\nimport java.time.temporal.ChronoUnit\nimport java.time.temporal.ChronoUnit.*\nimport java.util.*\nimport 
java.util.concurrent.ConcurrentHashMap\nimport java.util.concurrent.Executors.newScheduledThreadPool\nimport java.util.concurrent.TimeUnit.SECONDS\nimport java.util.concurrent.atomic.AtomicInteger\nimport java.util.concurrent.atomic.AtomicReference\n\n/**\n * Created by smalik3 on 6/29/18\n */\n@JsonInclude(NON_EMPTY)\ndata class Cron(\n    val id: String, val expression: String, val type: CronType, val tenant: String,\n    var lastExecutionTime: ZonedDateTime?, val lastUpdated: ZonedDateTime?,\n    val tracingEnabled: Boolean = false, val tracingGranularity: ChronoUnit = DAYS\n) {\n\n    private val computed = ConcurrentHashMap<Int, Any>()\n\n    private fun parsed() = computed.computeIfAbsent(0) { CronRunner.parser(type).parse(expression)!! } as com.cronutils.model.Cron\n    internal fun executionTime() = computed.computeIfAbsent(1) { println(\"computing\"); ExecutionTime.forCron(parsed()) } as ExecutionTime\n\n    init {\n        require(tracingGranularity in supportedGranularities) { \"only $supportedGranularities granularities are supported\" }\n    }\n\n    fun cronId() = cronId(tenant, id, type)\n    override fun toString() = \"${cronId()}:$expression\"\n\n    companion object {\n        fun cronId(tenant: String, id: String, type: CronType) = \"$tenant/$id/$type\"\n        private val supportedGranularities = setOf(ChronoUnit.SECONDS, MINUTES, HOURS, DAYS, WEEKS, MONTHS, YEARS)\n    }\n\n    fun toGranularity(zdt: ZonedDateTime): String {\n        return when (tracingGranularity) {\n            YEARS -> zdt.year.toString()\n            MONTHS -> \"${zdt.year}/${zdt.monthValue}\"\n            WEEKS -> \"${zdt.year}/${zdt.monthValue}/${WEEKS.between(zdt.withDayOfMonth(1), zdt)}\"\n            DAYS -> \"${zdt.year}/${zdt.dayOfYear}\"\n            HOURS -> \"${zdt.year}/${zdt.dayOfYear}/${zdt.hour}\"\n            MINUTES -> \"${zdt.year}/${zdt.dayOfYear}/${zdt.hour}/${zdt.minute}\"\n            SECONDS -> 
\"${zdt.year}/${zdt.dayOfYear}/${zdt.hour}/${zdt.minute}/${zdt.second}\"\n            else -> throw IllegalArgumentException(\"unsupported unit: $tracingGranularity\")\n        }\n    }\n\n    fun describe(locale: Locale = Locale.US) = CronDescriptor.instance(locale).run { describe(parsed()) }!!\n}\n\nobject CronRunner : Module {\n\n    private val l = logger<CronRunner>()\n    internal val crons = module<Hz>().hz.getMap<Int, Crons>(\"crons\")\n\n    override fun init(registry: ModuleRegistry) {\n        l.info(\"initializing the cron module: starting the cron runner(s)\")\n        val lastRun = AtomicReference<ZonedDateTime?>()\n        workers.scheduleAtFixedRate({\n                                        try {\n                                            val now = nowUTC().withNano(0)\n                                            if (lastRun.get() == null || now > lastRun.get()) {\n                                                lastRun.set(now)\n                                                val nowString = now.toString()\n                                                @Suppress(\"UNCHECKED_CAST\")\n                                                val matches = (crons.executeOnKeys(\n                                                    crons.localKeySet(), CronMatchExecutionTimeProcessor(\n                                                        now.toInstant().toEpochMilli()\n                                                    )\n                                                ) as MutableMap<Int, List<String>>).values.flatten()\n                                                    .map { Cron::class.java.fromJson(it) }\n                                                if (matches.isNotEmpty()) {\n                                                    matches.map { c ->\n                                                        val e = EventResponse(\n                                                            c.id, nowString, c.tenant, eventId = 
\"${c.type}/$nowString\",\n                                                            triggeredAt = nowString, eventStatus = EventStatus.TRIGGERED, payload = c.expression\n                                                        ).event()\n                                                        if (l.isDebugEnabled) l.debug(\"triggering event for cron: ${c.cronId()} at $nowString\")\n                                                        module<ProcessorRegistry>()(e).transformAsync { updateCronExecutionTime(c, now, it!!) }\n                                                    }.reduce()\n                                                        .done({ l.error(\"cron-failed: time: $nowString, crons: ${matches.map { it.cronId() }}\") })\n                                                        { if (l.isDebugEnabled) l.debug(\"cron-successful: time: $nowString, crons: ${matches.map { it.cronId() }}\") }\n                                                }\n                                            }\n                                        } catch (e: Exception) {\n                                            l.error(\"error in running cron\", e.rootCause()!!)\n                                        }\n                                    }, 0, 1, SECONDS)\n    }\n\n    private val parsers = ConcurrentHashMap<CronType, CronParser>()\n    internal fun parser(type: CronType) = parsers.computeIfAbsent(type) { CronParser(CronDefinitionBuilder.instanceDefinitionFor(type)) }\n\n    private val index = AtomicInteger()\n    private val workers =\n        newScheduledThreadPool(int(\"cron.runner.core.pool.size\")) { Thread(it, \"cron-runner#${index.incrementAndGet()}\") }\n\n    private fun updateCronExecutionTime(\n        cron: Cron,\n        executionTime: ZonedDateTime,\n        event: Event\n    ): ListenableFuture<Cron> {\n        val f =\n            crons.submitToKey(cron.partition(), CronUpdateExecutionTimeEntryProcessor(cron.cronId(), executionTime.toString()))\n      
          .listenable().transform { cron }\n        return if (cron.tracingEnabled) {\n            f.transformAsync {\n                save<KV> {\n                    it.key = \"${cron.cronId()}:${cron.toGranularity(executionTime)}\"\n                    it.column = executionTime.toString(); it.value = event.toResponse().yaml()\n                }.transform { cron }\n            }\n        } else f\n    }\n}\n\nprivate fun Cron.partition() = module<Hz>().hz.partitionService.getPartition(cronId()).partitionId\nprivate fun String.partition() = module<Hz>().hz.partitionService.getPartition(this).partitionId\n\nobject CronService {\n\n    private val l = logger<CronService>()\n\n    fun upsert(cron: Cron) = response {\n        if (l.isInfoEnabled) l.info(\"creating/updating cron: $cron\")\n        val cronId = cron.cronId()\n        val pId = cron.partition()\n        if (l.isDebugEnabled) l.debug(\"cron: $cronId hashed to partition: $pId\")\n        crons.executeOnKey(pId, CronEntryProcessor(cron.copy(lastUpdated = nowUTC(), lastExecutionTime = null).json()))\n        if (l.isDebugEnabled) l.debug(\"cron: $cronId updated successfully\")\n        mapOf(\"status\" to \"OK\")\n    }\n\n    fun delete(tenant: String, id: String, type: String) = response {\n        val types = if (type == \"*\") CronType.values().toSet() else setOf(CronType.valueOf(type))\n        if (l.isInfoEnabled) l.info(\"deleting cron: $tenant/$id, types: $types\")\n        types.forEach {\n            val cronId = Cron.cronId(tenant, id, it)\n            val pId = cronId.partition()\n            if (l.isDebugEnabled) l.debug(\"cron: $cronId hashed to partition: $pId\")\n            crons.executeOnKey(pId, CronDeleteEntryProcessor(cronId))\n            if (l.isDebugEnabled) l.debug(\"cron: $cronId deleted successfully\")\n        }\n        mapOf(\"status\" to \"OK\")\n    }\n\n    @JsonInclude(NON_EMPTY)\n    data class CronDescription(val cron: Cron, val description: String?)\n\n    fun get(tenant: 
String, id: String, describe: Boolean?) = response {\n        crons.values.flatMap { it.crons.values.filter { it.tenant == tenant && it.id == id } }\n            .map { CronDescription(it, describe?.run { it.describe() }) }\n    }\n\n    fun describe(cron: Cron) = response {\n        CronDescription(cron, cron.describe())\n    }\n}\n"
  },
  {
    "path": "kafka/LICENSE.txt",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2018 Sandeep Malik\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "kafka/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n    <parent>\n        <artifactId>bigben</artifactId>\n        <groupId>com.walmartlabs.bigben</groupId>\n        <version>1.0.7-SNAPSHOT</version>\n    </parent>\n\n    <artifactId>bigben-kafka</artifactId>\n    <packaging>takari-jar</packaging>\n    <name>Bigben:kafka</name>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.walmartlabs.bigben</groupId>\n            <artifactId>bigben-lib</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.kafka</groupId>\n            <artifactId>kafka-clients</artifactId>\n            <version>1.0.0</version>\n        </dependency>\n    </dependencies>\n\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.jetbrains.kotlin</groupId>\n                <artifactId>kotlin-maven-plugin</artifactId>\n            </plugin>\n        </plugins>\n    </build>\n\n</project>"
  },
  {
    "path": "kafka/src/main/kotlin/com/walmartlabs/bigben/kafka/kafka-mocks.kt",
    "content": "/*-\n * #%L\n * bigben-kafka\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.kafka\n\nimport com.google.common.util.concurrent.Futures\nimport com.google.common.util.concurrent.Futures.immediateFailedFuture\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.entities.EventResponse\nimport com.walmartlabs.bigben.processors.MessageProducerFactory\nimport com.walmartlabs.bigben.utils.Json\nimport com.walmartlabs.bigben.utils.commons.PropsLoader\nimport org.apache.kafka.clients.consumer.Consumer\nimport org.apache.kafka.clients.consumer.ConsumerRecord\nimport org.apache.kafka.clients.consumer.MockConsumer\nimport org.apache.kafka.clients.consumer.OffsetResetStrategy.EARLIEST\nimport org.apache.kafka.clients.producer.MockProducer\nimport org.apache.kafka.common.serialization.StringSerializer\nimport java.util.concurrent.atomic.AtomicReference\n\n/**\n * Created by smalik3 on 6/28/18\n */\nclass MockMessageProducerFactory : MessageProducerFactory {\n    companion object {\n        val LAST_MESSAGE = AtomicReference<EventResponse>()\n    }\n    override fun create(tenant: String, props: Json) = object : KafkaMessageProducer(tenant, props) {\n        override fun createProducer(props: Json) = MockProducer<String, String>(true, 
StringSerializer(), StringSerializer())\n        override fun produce(e: EventResponse): ListenableFuture<*> {\n            return if (props.containsKey(\"fail\")) {\n                immediateFailedFuture<Any>(Exception()) as ListenableFuture<*>\n            } else super.produce(e).apply { LAST_MESSAGE.set(e) }\n        }\n    }\n}\n\nclass MockKafkaProcessor(props: PropsLoader) : KafkaMessageProcessor(props) {\n    lateinit var consumer: MockConsumer<String, String>\n    override fun createConsumer(): Consumer<String, String> = MockConsumer<String, String>(EARLIEST).apply { consumer = this }\n    override fun process(cr: ConsumerRecord<String, String>) = Futures.immediateFuture(\"\" as Any)!!\n}\n"
  },
  {
    "path": "kafka/src/main/kotlin/com/walmartlabs/bigben/kafka/kafka-module.kt",
    "content": "/*-\n * #%L\n * bigben-kafka\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.kafka\n\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.utils.Json\nimport com.walmartlabs.bigben.utils.commons.Module\nimport com.walmartlabs.bigben.utils.commons.ModuleRegistry\nimport com.walmartlabs.bigben.utils.commons.Props\nimport com.walmartlabs.bigben.utils.commons.PropsLoader\nimport com.walmartlabs.bigben.utils.done\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.reduce\nimport com.walmartlabs.bigben.utils.retriable\nimport com.walmartlabs.bigben.utils.rootCause\nimport com.walmartlabs.bigben.utils.transform\nimport org.apache.kafka.clients.consumer.CommitFailedException\nimport org.apache.kafka.clients.consumer.Consumer\nimport org.apache.kafka.clients.consumer.ConsumerRebalanceListener\nimport org.apache.kafka.clients.consumer.ConsumerRecord\nimport org.apache.kafka.clients.consumer.KafkaConsumer\nimport org.apache.kafka.clients.consumer.OffsetAndMetadata\nimport org.apache.kafka.common.TopicPartition\nimport org.apache.kafka.common.errors.WakeupException\nimport java.lang.Thread.currentThread\nimport java.util.concurrent.Executors.newFixedThreadPool\nimport 
java.util.concurrent.atomic.AtomicBoolean\nimport java.util.concurrent.atomic.AtomicInteger\nimport java.util.concurrent.atomic.AtomicReference\nimport java.util.concurrent.locks.Lock\nimport java.util.concurrent.locks.ReentrantLock\nimport kotlin.concurrent.withLock\n\nobject KafkaModule : Module {\n\n    private val l = logger<KafkaModule>()\n\n    override fun init(registry: ModuleRegistry) {\n        l.info(\"initializing kafka processor(s)\")\n        Props.list(\"kafka.consumers\").forEach {\n            @Suppress(\"UNCHECKED_CAST\")\n            val p = Props.parse(it as Json)\n            require(p.exists(\"config.group.id\")) { \"group.id is required\" }\n            val index = AtomicInteger(0)\n            val numConsumers = p.int(\"num.consumers\")\n            l.info(\"creating $numConsumers kafka consumers\")\n            newFixedThreadPool(numConsumers) {\n                Thread(it, \"kafkaProcessor[${p.string(\"config.group.id\")}]#${index.getAndIncrement()}\")\n            }.apply {\n                (1..numConsumers).forEach {\n                    l.debug(\"creating kafka consumer: $it\")\n                    submit(try {\n                        \"processor.impl.class\".run {\n                            require(p.exists(this)) { \"$this is required\" }\n                            Class.forName(p.string(this)).let {\n                                require(KafkaMessageProcessor::class.java.isAssignableFrom(it)) { \"processor class must extend ${KafkaMessageProcessor::class.java.simpleName}\" }\n                                it.getConstructor(PropsLoader::class.java).newInstance(p) as Runnable\n                            }\n                        }\n                    } catch (e: Exception) {\n                        l.error(\"unexpected error in starting kafka processor\", e.rootCause())\n                        throw IllegalArgumentException(e)\n                    })\n                }\n            }\n        }\n    }\n}\n\nabstract class 
KafkaMessageProcessor(private val props: PropsLoader) : Runnable {\n    private val topics = props.string(\"topics\").split(\",\")\n    private val closed = AtomicBoolean()\n    private val autoCommit = props.boolean(\"config.enable.auto.commit\")\n    private var numUnknownExceptionRetries = props.int(\"unknown.exception.retries\", 5)\n\n    companion object {\n        private val l = logger<KafkaMessageProcessor>()\n    }\n\n    abstract fun process(cr: ConsumerRecord<String, String>): ListenableFuture<Any>\n    open fun createConsumer(): Consumer<String, String> = KafkaConsumer<String, String>(props.map(\"config\"))\n\n    override fun run() {\n        try {\n            process()\n        } catch (e: Exception) {\n            l.error(\"error in running kafka processor\", e.rootCause())\n        }\n    }\n\n    private fun process() {\n        val consumer = createConsumer()\n        if (l.isInfoEnabled) {\n            l.info(\"starting the kafka consumer ${currentThread().name} for topic(s): $topics\")\n            if (!autoCommit)\n                l.info(\"offsets will be committed manually\")\n        }\n        val owned = AtomicReference<Set<TopicPartition>?>()\n        consumer.subscribe(topics, object : ConsumerRebalanceListener {\n            override fun onPartitionsAssigned(partitions: MutableCollection<TopicPartition>) {\n                if (l.isDebugEnabled) l.debug(\"partitions assigned: ${partitions.groupBy { it.topic() }.mapValues {\n                    it.value.map { it.partition() }.toSortedSet()\n                }.toSortedMap()}\")\n                owned.set(partitions.toSet())\n            }\n\n            override fun onPartitionsRevoked(partitions: MutableCollection<TopicPartition>) {\n                if (l.isDebugEnabled) l.debug(\"partitions revoked: ${partitions.groupBy { it.topic() }.mapValues {\n                    it.value.map { it.partition() }.toSortedSet()\n                }.toSortedMap()}\")\n                owned.set(null)\n       
     }\n        })\n        val tasks = mutableListOf<() -> Unit>()\n        val inPoll = AtomicBoolean(false)\n        val taskLock: Lock = ReentrantLock()\n\n        while (!closed.get()) {\n            try {\n                taskLock.withLock {\n                    if (l.isDebugEnabled) l.debug(\"processing pending tasks\")\n                    tasks.forEach { it() }\n                    tasks.clear()\n                    if (l.isDebugEnabled) l.debug(\"pending tasks processed successfully\")\n                }\n                inPoll.set(true)\n                if (l.isDebugEnabled) l.debug(\"starting the poll for topic(s): $topics\")\n                val records = consumer.poll(props.long(\"max.poll.wait.time\"))\n                inPoll.set(false)\n                if (l.isDebugEnabled) l.debug(\"fetched ${records.count()} messages from topic(s): $topics\")\n                if (records.count() > 0) {\n                    val (offsets, range) = records.groupBy { TopicPartition(it.topic(), it.partition()) }.run {\n                        mapValues { OffsetAndMetadata(it.value.maxBy { it.offset() }!!.offset() + 1) } to\n                                mapValues { \"[${it.value.minBy { it.offset() }!!.offset()}-${it.value.maxBy { it.offset() }!!.offset()}]\" }\n                                        .mapKeys { \"${it.key.topic()}[${it.key.partition()}]\" }.toSortedMap()\n                    }\n                    val partitions = records.partitions().apply {\n                        if (l.isDebugEnabled)\n                            l.debug(\"pausing the partitions ${groupBy { it.topic() }.mapValues {\n                                it.value.map { it.partition() }.toSortedSet()\n                            }.toSortedMap()}\")\n                        consumer.pause(this)\n                    }; {\n                        if (l.isDebugEnabled)\n                            l.debug(\"resuming the partitions ${partitions.groupBy { it.topic() }.mapValues {\n            
                    it.value.map { it.partition() }.toSortedSet()\n                            }.toSortedMap()}\")\n                        consumer.resume(partitions intersect (owned.get() ?: emptySet()))\n                        val ownedSnapshot = owned.get()\n                        if (!autoCommit && ownedSnapshot != null) {\n                            val filtered = offsets.filterKeys { it in ownedSnapshot }\n                            if (l.isDebugEnabled) l.debug(\"committing offsets $filtered\")\n                            try {\n                                consumer.commitSync(filtered)\n                            } catch (e: CommitFailedException) {\n                                l.warn(\"bulk commit failed for offsets: $filtered, trying to each owned partition commit one by one\")\n                                offsets.forEach {\n                                    // no snapshot here\n                                    if (owned.get() != null && it.key in owned.get()!!) 
{\n                                        try {\n                                            consumer.commitSync(mapOf(it.key to it.value))\n                                        } catch (e: Exception) {\n                                            l.warn(\"error in committing offset for ${it.key}, ignoring\")\n                                        }\n                                    } else l.info(\"partition ${it.key} is no more owned by this consumer, ignoring the offset commit\")\n                                }\n                            }\n                        }\n                    }.apply {\n                        if (l.isDebugEnabled) l.debug(\"submitting records for processing: $range\")\n                        records.map {\n                            { process(it) }.retriable(\n                                    \"${it.topic()}/${it.partition()}/${it.offset()}/${it.key()}\",\n                                    maxRetries = props.int(\"message.retry.max.count\")\n                            )\n                        }.reduce().transform { this }.done({\n                            l.error(\"error in processing messages: $range\", it.rootCause())\n                            taskLock.withLock { tasks += this }\n                            if (l.isDebugEnabled) l.debug(\"adding tasks for partition resume and offset commits\")\n                            if (inPoll.get()) {\n                                if (l.isDebugEnabled) l.debug(\"waking up consumer stuck in poll\")\n                                consumer.wakeup()\n                            }\n                        }) {\n                            if (l.isDebugEnabled) {\n                                l.debug(\"messages processed successfully: $range\")\n                                l.debug(\"adding tasks for partition resume and offset commits\")\n                            }\n                            taskLock.withLock { tasks += this }\n                          
  if (inPoll.get()) {\n                                if (l.isDebugEnabled) l.debug(\"waking up consumer stuck in poll\")\n                                consumer.wakeup()\n                            }\n                        }\n                    }\n                }\n            } catch (e: Exception) {\n                val rc = e.rootCause()\n                if (rc is WakeupException) {\n                    if (closed.get()) {\n                        l.info(\"consumer has been closed for topic(s): $topics\")\n                    }\n                } else {\n                    if (numUnknownExceptionRetries-- > 0) l.warn(\"unknown exception, ignoring\", rc)\n                    else l.error(\n                            \"unknown exception, giving up after $numUnknownExceptionRetries retries, closing the consumer\",\n                            rc\n                    )\n                    closed.set(true)\n                }\n            }\n        }\n    }\n}"
  },
  {
    "path": "kafka/src/main/kotlin/com/walmartlabs/bigben/kafka/kafka-processor.kt",
    "content": "/*-\n * #%L\n * bigben-kafka\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.kafka\n\nimport com.google.common.util.concurrent.Futures.immediateFailedFuture\nimport com.google.common.util.concurrent.Futures.immediateFuture\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.BigBen\nimport com.walmartlabs.bigben.api.EventReceiver\nimport com.walmartlabs.bigben.entities.EventRequest\nimport com.walmartlabs.bigben.entities.EventStatus.*\nimport com.walmartlabs.bigben.entities.Mode.UPSERT\nimport com.walmartlabs.bigben.extns.event\nimport com.walmartlabs.bigben.processors.ProcessorRegistry\nimport com.walmartlabs.bigben.utils.commons.PropsLoader\nimport com.walmartlabs.bigben.utils.fromJson\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.rootCause\nimport com.walmartlabs.bigben.utils.transformAsync\nimport org.apache.kafka.clients.consumer.ConsumerRecord\n\nclass ProcessorImpl(props: PropsLoader) : KafkaMessageProcessor(props) {\n\n    companion object {\n        private val l = logger<ProcessorImpl>()\n    }\n\n    private val badMessageMarker = immediateFuture(null)\n    private val eventReceiver = BigBen.module<EventReceiver>()\n    private val processorRegistry = 
BigBen.module<ProcessorRegistry>()\n\n    override fun process(cr: ConsumerRecord<String, String>): ListenableFuture<Any> {\n        return ((try {\n            EventRequest::class.java.fromJson(cr.value())\n        } catch (e: Exception) {\n            l.warn(\"bad message format, dropping: ${cr.value()}, error: ${e.rootCause()?.message}\"); null\n        })?.run {\n            if (l.isDebugEnabled) l.debug(\"received audit event: $this\")\n            try {\n                if (mode == UPSERT) eventReceiver.addEvent(this).transformAsync {\n                    if (it!!.eventStatus == TRIGGERED) processorRegistry.invoke(it.event())\n                    else if (it.eventStatus == ERROR || it.eventStatus == REJECTED) {\n                        l.warn(\"event request is rejected or had error, event response: $it\")\n                    }\n                    immediateFuture(it)\n                } else eventReceiver.removeEvent(id!!, tenant!!)\n            } catch (e: Exception) {\n                val rc = e.rootCause()!!\n                l.error(\"failed to process message: $cr\", rc)\n                immediateFailedFuture<Any>(rc)\n            }\n        } ?: badMessageMarker).run {\n            @Suppress(\"UNCHECKED_CAST\")\n            this as ListenableFuture<Any>\n        }\n    }\n}\n"
  },
  {
    "path": "kafka/src/main/kotlin/com/walmartlabs/bigben/kafka/kafka-producer.kt",
    "content": "/*-\n * #%L\n * bigben-kafka\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.kafka\n\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.google.common.util.concurrent.SettableFuture\nimport com.walmartlabs.bigben.entities.EventDeliveryOption.FULL_EVENT\nimport com.walmartlabs.bigben.entities.EventResponse\nimport com.walmartlabs.bigben.processors.MessageProducer\nimport com.walmartlabs.bigben.processors.MessageProducerFactory\nimport com.walmartlabs.bigben.utils.Json\nimport com.walmartlabs.bigben.utils.commons.Module\nimport com.walmartlabs.bigben.utils.commons.ModuleRegistry\nimport com.walmartlabs.bigben.utils.json\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.rootCause\nimport org.apache.kafka.clients.producer.KafkaProducer\nimport org.apache.kafka.clients.producer.Producer\nimport org.apache.kafka.clients.producer.ProducerRecord\n\n/**\n * Created by smalik3 on 6/25/18\n */\nclass KafkaMessageProducerFactory : MessageProducerFactory, Module {\n    override fun init(registry: ModuleRegistry) {\n    }\n\n    override fun create(tenant: String, props: Json) = KafkaMessageProducer(tenant, props)\n}\n\nopen class KafkaMessageProducer(private val tenant: String, props: Json) : MessageProducer {\n\n    companion 
object {\n        val l = logger<KafkaMessageProducer>()\n    }\n\n    private val kafkaProducer = this.createProducer(props)\n    private val topic = require(props.containsKey(\"topic\")) { \"no topic in props\" }.run { props[\"topic\"]!!.toString() }\n\n    protected open fun createProducer(props: Json): Producer<String, String> =\n        KafkaProducer<String, String>(props).apply { if (l.isInfoEnabled) l.info(\"kafka producer for tenant $tenant created successfully\") }\n\n    override fun produce(e: EventResponse): ListenableFuture<*> {\n        if (l.isDebugEnabled) l.debug(\"producer:begin: tenant: $tenant, topic: $topic, event: ${e.id}\")\n        return SettableFuture.create<Any>().apply {\n            val content = if (e.deliveryOption == null || e.deliveryOption == FULL_EVENT) e.json() else e.payload\n            kafkaProducer.send(ProducerRecord(topic, e.id, content)) { r, exception ->\n                if (exception != null) {\n                    l.error(\"producer:error: tenant: $tenant, topic: $topic, event: ${e.id}, failure\", exception.rootCause())\n                    setException(exception.rootCause()!!)\n                } else {\n                    if (l.isDebugEnabled) l.debug(\"successfully published, event: ${e.tenant}/${e.id}, topic: ${r.topic()}, partition: ${r.partition()}, offset: ${r.offset()}\")\n                    set(e)\n                }\n            }\n        }\n    }\n}"
  },
  {
    "path": "lib/LICENSE.txt",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2018 Sandeep Malik\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "lib/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n    <parent>\n        <groupId>com.walmartlabs.bigben</groupId>\n        <artifactId>bigben</artifactId>\n        <version>1.0.7-SNAPSHOT</version>\n    </parent>\n\n    <artifactId>bigben-lib</artifactId>\n    <packaging>takari-jar</packaging>\n    <name>BigBen:lib</name>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.walmartlabs.bigben</groupId>\n            <artifactId>bigben-commons</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.ning</groupId>\n            <artifactId>async-http-client</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.jetbrains.kotlin</groupId>\n            <artifactId>kotlin-reflect</artifactId>\n        </dependency>\n    </dependencies>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.jetbrains.kotlin</groupId>\n                <artifactId>kotlin-maven-plugin</artifactId>\n            </plugin>\n        </plugins>\n    </build>\n</project>"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/BigBen.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben\n\nimport com.walmartlabs.bigben.entities.EntityProvider\nimport com.walmartlabs.bigben.utils.commons.ModuleRegistry\nimport com.walmartlabs.bigben.utils.commons.Props\nimport com.walmartlabs.bigben.utils.commons.Props.load\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.rootCause\n\n/**\n * Created by smalik3 on 6/24/18\n */\n\nobject BigBen {\n    private val l = logger<BigBen>()\n\n    val registry = ModuleRegistry()\n\n    inline fun <reified T> module() = registry.module<T>()\n    inline fun <reified T> entityProvider() = registry.module<EntityProvider<T>>()\n\n    fun init() {\n        Initializer\n    }\n\n    private object Initializer {\n        init {\n            System.getProperty(\"bigben.configs\")?.run {\n                val configs = this.split(\",\")\n                l.info(\"using configs: $configs\")\n                load(*configs.toTypedArray())\n            } ?: {\n                l.warn(\"no 'bigben.configs' system property set, using the default: file://bigben.yaml\")\n                load(\"file://bigben.yaml\")\n            }()\n            l.info(\"initiating module registration\")\n            try {\n                
BigBen.registry.loadModules(Props)\n            } catch (e: Throwable) {\n                l.error(\"error in loading modules, system will exit now\", e.rootCause())\n                //exitProcess(1)\n                throw ExceptionInInitializerError(e.rootCause())\n            }\n            l.info(\"module registration is complete\")\n            l.info(\"BigBen initialized successfully\")\n        }\n    }\n}"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/api/EventReceiver.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.api\n\nimport com.google.common.util.concurrent.Futures.immediateFuture\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.hazelcast.map.EntryBackupProcessor\nimport com.hazelcast.map.EntryProcessor\nimport com.hazelcast.nio.ObjectDataInput\nimport com.hazelcast.nio.ObjectDataOutput\nimport com.walmartlabs.bigben.BigBen.entityProvider\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.core.ScheduleScanner.Companion.BUCKET_CACHE\nimport com.walmartlabs.bigben.entities.*\nimport com.walmartlabs.bigben.entities.EventDeliveryOption.PAYLOAD_ONLY\nimport com.walmartlabs.bigben.entities.EventStatus.*\nimport com.walmartlabs.bigben.extns.*\nimport com.walmartlabs.bigben.hz.HzObjectFactory.ObjectId.EVENT_RECEIVER_ADD_EVENT\nimport com.walmartlabs.bigben.processors.ProcessorRegistry\nimport com.walmartlabs.bigben.utils.*\nimport com.walmartlabs.bigben.utils.commons.Props\nimport com.walmartlabs.bigben.utils.hz.Hz\nimport java.time.ZonedDateTime\nimport kotlin.collections.MutableMap.MutableEntry\n\n/**\n * Created by smalik3 on 2/26/18\n */\nclass EventReceiver(val hz: Hz) {\n\n    companion object {\n        private val l = logger<EventReceiver>()\n      
  internal val CACHED_PROCESSOR = CountIncrementer()\n    }\n\n    private val scanInterval = Props.int(\"events.schedule.scan.interval.minutes\")\n    private val lapseOffset = Props.int(\"events.receiver.lapse.offset.minutes\")\n\n    init {\n        if (l.isInfoEnabled) l.info(\"using event lapseOffset: {} minutes\", lapseOffset)\n    }\n\n    fun addEvent(eventRequest: EventRequest): ListenableFuture<EventResponse> {\n        return validate(eventRequest)?.let { it } ?: {\n            val eventTime = ZonedDateTime.parse(eventRequest.eventTime)\n            val bucketId = utc(bucketize(eventTime.toInstant().toEpochMilli(), scanInterval))\n            fetch<EventLookup> { it.xrefId = eventRequest.id; it.tenant = eventRequest.tenant }\n                .transformAsync {\n                    if (it != null) {\n                        if (it.eventTime == eventTime) {\n                            if (l.isDebugEnabled) l.debug(\"{}, event update received, no change in event time\", eventRequest.id)\n                            save<Event> { e -> e.bucketId = it.bucketId; e.shard = it.shard; e.eventTime = it.eventTime; e.id = it.eventId; e.payload = eventRequest.payload }.transform {\n                                if (l.isDebugEnabled) l.debug(\"{}, event updated successfully\", eventRequest.id)\n                                eventRequest.toResponse().apply { eventId = it!!.id; eventStatus = UPDATED }\n                            }\n                        } else {\n                            if (l.isDebugEnabled) l.debug(\"event update received, event time changed, add new event -> update existing look up -> delete old event\")\n                            val oldLookup = it\n                            addEvent0(eventRequest, bucketId, eventTime).transformAsync {\n                                addLookup0(eventRequest, bucketId, it!!.shard!!, it.id!!, eventTime).transformAsync { removeEvent0(oldLookup) }.transform {\n                                    
eventRequest.toResponse().apply { eventId = it!!.eventId; eventStatus = UPDATED }\n                                }\n                            }\n                        }\n                    } else {\n                        addEvent0(eventRequest, bucketId, eventTime).transformAsync {\n                            addLookup0(eventRequest, it!!.bucketId!!, it.shard!!, it.id!!, it.eventTime!!).transform {\n                                if (l.isDebugEnabled) l.debug(\"{}, add-event: successful\", it!!.xrefId)\n                                eventRequest.toResponse().apply { eventId = it!!.eventId; eventStatus = ACCEPTED }\n                            }\n                        }\n                    }\n                }.catching {\n                    l.error(\"failed to add event: {}\", eventRequest.id, it.rootCause())\n                    eventRequest.toResponse().apply { eventStatus = ERROR }\n                }\n        }()\n    }\n\n    private fun addLookup0(eventRequest: EventRequest, bucketId: ZonedDateTime, shard: Int, eventId: String, eventTime: ZonedDateTime): ListenableFuture<EventLookup> {\n        return save {\n            it.tenant = eventRequest.tenant\n            it.xrefId = eventRequest.id\n            it.bucketId = bucketId\n            it.shard = shard\n            it.eventTime = eventTime\n            it.eventId = eventId\n            if (l.isDebugEnabled) l.debug(\"{}, add-event: event-lookup-table: insert\", eventRequest.id)\n        }\n    }\n\n    private fun addEvent0(eventRequest: EventRequest, bucketId: ZonedDateTime, eventTime: ZonedDateTime): ListenableFuture<Event> {\n        return hz.hz.getMap<ZonedDateTime, Bucket>(BUCKET_CACHE).let {\n            it.submitToKey(bucketId, CACHED_PROCESSOR).listenable().transformAsync {\n                val count = it as Long\n                save<Event> {\n                    if (l.isDebugEnabled) l.debug(\"{}, add-event: event-table: insert\", eventRequest.id)\n                    it.id = 
eventId(eventRequest)\n                    it.eventTime = eventTime\n                    it.shard = ((count - 1) / Props.int(\"events.receiver.shard.size\")).toInt()\n                    it.status = UN_PROCESSED\n                    it.tenant = eventRequest.tenant\n                    it.xrefId = eventRequest.id\n                    it.bucketId = eventTime.bucket()\n                    it.payload = eventRequest.payload\n                }\n            }\n        }\n    }\n\n    private fun removeEvent0(eventLookup: EventLookup): ListenableFuture<EventLookup> {\n        return { remove<Event> { it.eventTime = eventLookup.eventTime; it.id = eventLookup.eventId; it.shard = eventLookup.shard; it.bucketId = eventLookup.bucketId } }.retriable(\n            \"delete-event-${eventLookup.xrefId}\",\n            Props.int(\"events.receiver.delete.max.retries\"),\n            Props.int(\"events.receiver.delete.initial.delay\"),\n            Props.int(\"events.receiver.delete.backoff.multiplier\")\n        ).transform { eventLookup }\n    }\n\n    fun removeEvent(id: String, tenant: String): ListenableFuture<EventResponse> {\n        val eventResponse = EventResponse().apply { this.id = id; this.tenant = tenant }\n        return fetch<EventLookup> { it.xrefId = id; it.tenant = tenant }.transformAsync { el ->\n            if (el == null) immediateFuture(eventResponse)\n            else {\n                if (l.isDebugEnabled) l.debug(\"removing event: {}/{}\", tenant, id)\n                remove<Event> { it.eventTime = el.eventTime; it.shard = el.shard; it.id = el.eventId; it.bucketId = el.bucketId }.transformAsync {\n                    if (l.isDebugEnabled) l.debug(\"removing event look up: {}/{}\", tenant, id)\n                    remove<EventLookup> { it.tenant = el.tenant; it.xrefId = el.xrefId }.transform {\n                        if (l.isDebugEnabled) l.debug(\"event removed successfully : {}/{}\", tenant, id)\n                        eventResponse.apply { eventStatus = 
DELETED; eventId = it?.eventId }\n                    }\n                }\n            }\n        }.catching {\n            l.error(\"error in removing the event: {}/{}\", tenant, id, it.rootCause())\n            eventResponse.error = Error(500, it.rootCause()?.message)\n            eventResponse.apply { eventStatus = ERROR }\n        }\n    }\n\n    private fun validate(eventRequest: EventRequest): ListenableFuture<EventResponse>? {\n        if (eventRequest.tenant == null) {\n            val eventResponse = eventRequest.toResponse()\n            eventResponse.eventStatus = REJECTED\n            eventResponse.error = Error(400, \"tenant not present\")\n            l.error(\"event rejected, tenant missing, {}\", eventRequest.json())\n            return immediateFuture<EventResponse>(eventResponse)\n        }\n        if (eventRequest.eventTime == null) {\n            val eventResponse = eventRequest.toResponse()\n            eventResponse.eventStatus = REJECTED\n            eventResponse.error = Error(400, \"event time not present\")\n            l.error(\"event rejected, event time not present, {} \", eventRequest.json())\n            return immediateFuture<EventResponse>(eventResponse)\n        }\n        if (eventRequest.tenant!! !in module<ProcessorRegistry>().registeredTenants()) {\n            val eventResponse = eventRequest.toResponse()\n            eventResponse.eventStatus = REJECTED\n            eventResponse.error = Error(400, \"tenant not registered / unknown tenant: ${eventRequest.tenant}\")\n            l.error(\"event rejected, unknown tenant. 
Did you register one in the processors.config?, {}\", eventRequest.json())\n            return immediateFuture<EventResponse>(eventResponse)\n        }\n        if (eventRequest.deliveryOption == PAYLOAD_ONLY && eventRequest.payload == null) {\n            val eventResponse = eventRequest.toResponse()\n            eventResponse.eventStatus = REJECTED\n            eventResponse.error = Error(400, \"payload must not be null for deliveryOption $PAYLOAD_ONLY\")\n            l.error(\"event rejected, null payload for '$PAYLOAD_ONLY' option: $eventRequest\")\n            return immediateFuture<EventResponse>(eventResponse)\n        }\n        try {\n            ZonedDateTime.parse(eventRequest.eventTime)\n        } catch (e: Exception) {\n            val eventResponse = eventRequest.toResponse()\n            eventResponse.eventStatus = REJECTED\n            eventResponse.error = Error(400, \"event time can not be parsed. Must be in ISO 8601 format.\")\n            l.error(\"event rejected, bad event time format, {}\", eventRequest.json())\n            return immediateFuture<EventResponse>(eventResponse)\n        }\n\n        if (ZonedDateTime.parse(eventRequest.eventTime).isBefore(nowUTC().plusMinutes(lapseOffset.toLong()))) {\n            val eventResponse = eventRequest.toResponse()\n            eventResponse.eventStatus = TRIGGERED\n            eventResponse.triggeredAt = nowUTC().toString()\n            if (l.isDebugEnabled) l.debug(\"lapsed event received, marking it {}, eventRequest: {}\", TRIGGERED, eventRequest.json())\n            return immediateFuture<EventResponse>(eventResponse)\n        }\n        return null\n    }\n\n    internal class CountIncrementer :\n        Idso(EVENT_RECEIVER_ADD_EVENT), EntryProcessor<ZonedDateTime, Bucket?>, EntryBackupProcessor<ZonedDateTime, Bucket?> {\n\n        companion object {\n            private val l = logger<CountIncrementer>()\n        }\n\n        override fun getBackupProcessor() = this\n\n        override fun 
processBackup(entry: MutableEntry<ZonedDateTime, Bucket?>?) {\n            process(entry!!)\n        }\n\n        override fun process(entry: MutableEntry<ZonedDateTime, Bucket?>): Long? {\n            val b = if (entry.value == null) entityProvider<Bucket>().let { it.raw(it.selector(Bucket::class.java)) } else entry.value!!\n            b.count = (b.count ?: 0) + 1L\n            b.updatedAt = nowUTC()\n            if (b.status == null)\n                b.status = UN_PROCESSED\n            entry.setValue(b)\n            if (l.isDebugEnabled) l.debug(\"bucket-id: {}, old-count: {}, new-count: {} \", entry.key, b.count!! - 1, b.count)\n            return b.count\n        }\n\n        override fun writeData(out: ObjectDataOutput) {\n        }\n\n        override fun readData(`in`: ObjectDataInput) {\n        }\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/api/EventService.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.api\n\nimport com.google.common.base.Throwables.getStackTraceAsString\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.entities.*\nimport com.walmartlabs.bigben.entities.EventStatus.*\nimport com.walmartlabs.bigben.extns.*\nimport com.walmartlabs.bigben.processors.ProcessorConfig\nimport com.walmartlabs.bigben.processors.ProcessorRegistry\nimport com.walmartlabs.bigben.tasks.StatusTask\nimport com.walmartlabs.bigben.utils.*\nimport com.walmartlabs.bigben.utils.hz.Hz\nimport com.walmartlabs.bigben.utils.hz.Service\nimport java.io.Serializable\nimport java.util.concurrent.Callable\n\n/**\n * Created by smalik3 on 2/27/18\n */\nclass EventService(\n    private val hz: Hz, private val service: Service,\n    private val receiver: EventReceiver\n) {\n\n    companion object {\n        private val l = logger<EventService>()\n    }\n\n    fun clusterStats() = response {\n        hz.hz.getExecutorService(\"default\").submitToAllMembers(StatusTask(service.name))\n            .mapKeys { it.key.address.toString() }\n            .mapValues { it.value.result { \"Error: ${getStackTraceAsString(it)}\" } }\n    }\n\n    fun schedule(events: List<EventRequest>) = response {\n      
  events.map { if (it.mode == Mode.UPSERT) receiver.addEvent(it) else receiver.removeEvent(it.id!!, it.tenant!!) }\n            .reduce().result { emptyList() }.run {\n                filter { it.eventStatus == TRIGGERED }.map { module<ProcessorRegistry>()(it.event()) }\n                    .done({ l.error(\"error in triggering lapsed events:\", it.rootCause()) }) {\n                        it!!.forEach {\n                            l.warn(\n                                \"event was triggered immediately (likely lapsed), event bucketId: {}, tenant: {}, \" +\n                                        \"eventTime: {}, currentTime: {}\", it.xrefId, it.tenant, it.eventTime, nowUTC()\n                            )\n                        }\n                    }\n                count { it.eventStatus == REJECTED }.let {\n                    when {\n                        it == events.size -> APIResponse(this, 400)\n                        it > 0 -> APIResponse(this, 206)\n                        else -> APIResponse(this)\n                    }\n                }\n            }\n    }\n\n    fun registerProcessor(config: ProcessorConfig) = response {\n        if (l.isInfoEnabled) l.info(\"saving the tenant config: $config\")\n        require(config.tenant != null) { \"tenant must not be null\" }\n        save<KV> { it.key = \"tenants\"; it.column = config.tenant; it.value = config.json() }\n        if (l.isInfoEnabled) l.info(\"broadcasting the tenant config to all members: $config\")\n        hz.hz.getExecutorService(\"default\").submitToAllMembers(ProcessRegisterTask(config))\n            .mapValues { it.value.listenable() }.values.toList().reduce().result { throw RuntimeException(\"\") }\n        module<ProcessorRegistry>().registeredConfigs()\n    }\n\n    fun registeredTenants() = response { module<ProcessorRegistry>().registeredConfigs() }\n\n    fun find(id: String, tenant: String) = response {\n        find(EventRequest().apply {\n            this.id = id; 
this.tenant = tenant\n        }, false)\n    }\n\n    fun dryrun(id: String, tenant: String) = response {\n        find(EventRequest().apply {\n            this.id = id; this.tenant = tenant\n        }, true)\n    }\n\n    private fun find(eventRequest: EventRequest, fire: Boolean): EventResponse? {\n        val eventResponse = eventRequest.toResponse()\n        return if (eventRequest.id != null && eventRequest.id!!.trim().isNotEmpty()) {\n            fetch<EventLookup> { it.xrefId = eventRequest.id; it.tenant = eventRequest.tenant }.result { null }\n                ?.let { el ->\n                    fetch<Event> {\n                        it.id = el.eventId; it.eventTime = el.eventTime; it.shard = el.shard; it.bucketId = el.bucketId\n                    }.result { null }?.run {\n                        eventResponse.also {\n                            it.eventId = id; it.eventTime = eventTime?.toString(); it.payload = payload\n                            it.eventStatus = status; if (status != UN_PROCESSED && status != null) it.triggeredAt =\n                            processedAt?.toString(); it.deliveryOption = deliveryOption(this)\n                            if (error != null) it.error = com.walmartlabs.bigben.entities.Error(500, error)\n                        }.also { if (fire) module<ProcessorRegistry>()(this) }\n                    }\n                }\n        } else {\n            throw IllegalArgumentException(\"null id\")\n        }\n    }\n\n    class ProcessRegisterTask(private val config: ProcessorConfig) : Serializable, Callable<ProcessorConfig?> {\n        override fun call() = module<ProcessorRegistry>().register(config)\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/core/BucketManager.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.core\n\nimport com.google.common.collect.HashMultimap\nimport com.google.common.collect.Multimap\nimport com.google.common.util.concurrent.Futures.immediateFuture\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.google.common.util.concurrent.ListenableScheduledFuture\nimport com.google.common.util.concurrent.MoreExecutors.listeningDecorator\nimport com.walmartlabs.bigben.BigBen.entityProvider\nimport com.walmartlabs.bigben.core.ScheduleScanner.Companion.BUCKET_CACHE\nimport com.walmartlabs.bigben.entities.Bucket\nimport com.walmartlabs.bigben.entities.EventStatus\nimport com.walmartlabs.bigben.entities.EventStatus.*\nimport com.walmartlabs.bigben.extns.toSet\nimport com.walmartlabs.bigben.utils.*\nimport com.walmartlabs.bigben.utils.commons.Props.int\nimport com.walmartlabs.bigben.utils.hz.Hz\nimport java.time.ZonedDateTime\nimport java.util.*\nimport java.util.concurrent.ConcurrentHashMap\nimport java.util.concurrent.ScheduledThreadPoolExecutor\nimport java.util.concurrent.ThreadFactory\nimport java.util.concurrent.TimeUnit.SECONDS\nimport java.util.concurrent.atomic.AtomicInteger\n\n/**\n * Created by smalik3 on 2/21/18\n */\nclass BucketManager(private val 
maxBuckets: Int, private val maxProcessingTime: Int, private val bucketWidth: Int, hz: Hz) {\n\n    companion object {\n        private val l = logger<BucketManager>()\n        private val index = AtomicInteger()\n        val scheduler = listeningDecorator(ScheduledThreadPoolExecutor(4, ThreadFactory { Thread(it, \"BucketManager-${index.getAndIncrement()}\") }))!!\n\n        internal fun emptyBucket(bucketId: ZonedDateTime) = entityProvider<Bucket>().let { it.raw(it.selector(Bucket::class.java)).apply { this.bucketId = bucketId; count = 0L; status = EMPTY } }\n    }\n\n    private val shardSize = int(\"events.receiver.shard.size\")\n    private val statusSyncer = StatusSyncer()\n    private val buckets = ConcurrentHashMap<ZonedDateTime, BucketSnapshot>()\n    private val cache = hz.hz.getMap<ZonedDateTime, Bucket>(BUCKET_CACHE)\n\n    @Volatile\n    private var bucketsLoader: BucketsLoader? = null\n\n    private val eventStatuses = setOf(null, EMPTY, UN_PROCESSED, PROCESSED)\n\n    fun getProcessableShardsForOrBefore(bucketId: ZonedDateTime): ListenableFuture<out Multimap<ZonedDateTime, Int>> {\n        if (bucketsLoader == null) {\n            if (l.isInfoEnabled) l.info(\"starting the background load of previous buckets\")\n            val fetchSize = int(\"buckets.background.load.fetch.size\")\n            bucketsLoader = BucketsLoader(maxBuckets - 1, fetchSize, bucketWidth, bucketId) {\n                buckets[it.bucketId!!] 
= when (it.status) {\n                    in eventStatuses -> BucketSnapshot.with(it.bucketId!!, it.count!!, shardSize, it.status ?: UN_PROCESSED)\n                    ERROR -> {\n                        require(it.failedShards != null && it.failedShards!!.isNotEmpty()) { \"${it.bucketId} is marked $ERROR but has no failed shards information\" }\n                        if (l.isInfoEnabled) l.info(\"bucket ${it.bucketId} has shard failures: ${it.failedShards}, scheduling them for reprocessing\")\n                        BucketSnapshot(it.bucketId!!, it.count!!, BitSet(), it.failedShards!!.fold(BitSet()) { b, i -> b.apply { set(i) } })\n                    }\n                    else -> throw IllegalArgumentException(\"invalid bucket status: $it\")\n                }\n            }.apply { run() }\n        }\n        return HashMultimap.create<ZonedDateTime, Int>().let { shards ->\n            cache.getAsync(bucketId).listenable().transform {\n                val bucket = it ?: emptyBucket(bucketId)\n                if (buckets.putIfAbsent(bucketId, BucketSnapshot.with(bucketId, bucket.count!!, shardSize, bucket.status!!)) != null) {\n                    l.warn(\"bucket with bucketId {} already existed in the cache, this is highly unusual\", bucketId)\n                }\n                buckets.entries.filter { e -> e.value.awaiting.cardinality() > 0 }.forEach { e -> e.value.awaiting.stream().forEach { s -> shards.put(e.key, s) } }\n                if (l.isInfoEnabled) l.info(\"processable shards at bucket: {}, are => {}\", bucketId, shards)\n                if (!shards.containsKey(bucketId)) {\n                    if (l.isInfoEnabled) l.info(\"no events in the bucket: {}\", bucketId)\n                }; shards\n            }.catching { e -> shards.also { l.warn(\"error in loading bucket: {}, will be retried again during next scan\", bucketId, e.rootCause()) } }\n        }\n    }\n\n    internal fun registerForProcessing(pairs: Collection<Pair<ZonedDateTime, Int>>) 
{\n        pairs.forEach { p -> buckets[p.first]!!.processing(p.second) }\n        purgeIfNeeded()\n        startShardsTimer(pairs)\n    }\n\n    private fun startShardsTimer(pairs: Collection<Pair<ZonedDateTime, Int>>): ListenableScheduledFuture<*> {\n        return pairs.sortedWith(Comparator { p1, p2 ->\n            p1.first.compareTo(p2.first).let { if (it != 0) it else p1.second.compareTo(p2.second) }\n        }).map { \"${it.first}[${it.second}]\" }.toList().let {\n            if (l.isDebugEnabled) l.debug(\"starting processing timer for shards: {}\", it)\n            scheduler.schedule({ checkShardsStatus(pairs, it) }, maxProcessingTime.toLong(), SECONDS)\n        }\n    }\n\n    @Synchronized\n    private fun checkShardsStatus(pairs: Collection<Pair<ZonedDateTime, Int>>, shards: List<String>) {\n        try {\n            pairs.forEach {\n                val bd = buckets[it.first]\n                if (bd != null && bd.processing.get(it.second)) {\n                    l.warn(\"bulk timer for shard: {}[{}] expired, marking the shard as failure\", it.first, it.second)\n                    bd.done(it.second, ERROR)\n                }\n            }\n        } catch (e: Exception) {\n            l.error(\"error in timing out the shards for processing, shards: {}\", shards, e)\n        }\n    }\n\n    @Synchronized\n    internal fun shardDone(bucketId: ZonedDateTime, shard: Int?, status: EventStatus) {\n        val bd = buckets[bucketId]\n        if (bd == null) {\n            l.warn(\"bucket {} not found in cache, might have been purged, ignoring this call\", bucketId)\n            return\n        }\n        bd.done(shard!!, status)\n    }\n\n    private val noOp = immediateFuture<Bucket>(null)\n\n    @Synchronized\n    internal fun bucketProcessed(bucketId: ZonedDateTime, status: EventStatus): ListenableFuture<Bucket> {\n        val bd = buckets[bucketId]\n        if (bd == null) {\n            l.warn(\"bucket {} not found in cache, this is extremely unusual\", 
bucketId)\n            return noOp\n        }\n        bd.processing.clear()\n        if (status == PROCESSED) {\n            if (l.isInfoEnabled) l.info(\"bucket {} done, marking it as {}, all shards done\", bucketId, status)\n            bd.awaiting.clear()\n        } else if (status == ERROR)\n            l.warn(\"bucket {} done, marking it as {}, failed shards are: {}\", bucketId, status, bd.awaiting)\n        return statusSyncer.syncBucket(bucketId, status, true, bd.awaiting.toSet() + bd.processing.toSet())\n    }\n\n    fun purgeIfNeeded() {\n        when {\n            buckets.size <= maxBuckets -> if (l.isDebugEnabled) l.debug(\"nothing to purge\")\n            else -> {\n                if (l.isDebugEnabled) l.debug(\"initiating purge check for buckets: {}\", this.buckets)\n                val task = {\n                    buckets.keys.sorted().take(buckets.size - maxBuckets).map { b ->\n                        buckets[b]!!.let {\n                            if (it.processing.cardinality() > 0) {\n                                if (l.isDebugEnabled) l.debug(\"skipping purge of bucket {}, shards are still being processed\", b)\n                                immediateFuture(it)\n                            } else {\n                                if (l.isDebugEnabled) l.debug(\"purging bucket snapshot: {}\", it)\n                                val bs = buckets.remove(it.id)\n                                when {\n                                    it.count == 0L -> immediateFuture(it)\n                                    it.awaiting.cardinality() == 0 -> {\n                                        if (l.isDebugEnabled) l.debug(\"bucket {} is processed\", b)\n                                        statusSyncer.syncBucket(b, PROCESSED, false, bs!!.awaiting.toSet() + bs.processing.toSet()).transform { bs }\n                                    }\n                                    else -> {\n                                        l.warn(\"bucket {} is 
marked error as final status\", b)\n                                        statusSyncer.syncBucket(b, ERROR, false, bs!!.awaiting.toSet() + bs.processing.toSet()).transform { bs }\n                                    }\n                                }\n                            }\n                        }\n                    }.reduce()\n                }\n                task.retriable().done({ l.error(\"error in purging snapshots\", it.rootCause()) }) {\n                    if (l.isInfoEnabled) l.info(\"purged buckets: {}\", it?.map { it!!.id })\n                }\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/core/BucketSnapshot.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.core\n\nimport com.walmartlabs.bigben.entities.EventStatus\nimport com.walmartlabs.bigben.entities.EventStatus.ERROR\nimport com.walmartlabs.bigben.entities.EventStatus.PROCESSED\nimport com.walmartlabs.bigben.utils.logger\nimport java.time.ZonedDateTime\nimport java.util.*\n\ninternal data class BucketSnapshot(val id: ZonedDateTime, val count: Long, val processing: BitSet, val awaiting: BitSet) {\n\n    companion object {\n        private val l = logger<BucketSnapshot>()\n        private val EMPTY = BitSet()\n\n        fun with(id: ZonedDateTime, count: Long, shardSize: Int, status: EventStatus): BucketSnapshot {\n            val shards = (if (count % shardSize == 0L) count / shardSize else count / shardSize + 1).toInt()\n            val awaiting = if (count == 0L || PROCESSED == status) EMPTY else {\n                BitSet(shards).apply { set(0, shards) }\n            }\n            when {\n                count == 0L -> if (l.isDebugEnabled) l.debug(\"bucket: {} => empty, no events\", id)\n                awaiting === EMPTY -> if (l.isDebugEnabled) l.debug(\"bucket: {} => already done\", id)\n                else -> {\n                    if (l.isDebugEnabled) l.debug(\"bucket: {} => 
has {} events, resulting in {} shards\", id, count, shards)\n                }\n            }\n            return BucketSnapshot(id, count, BitSet(), awaiting)\n        }\n    }\n\n    fun processing(shard: Int) = apply { awaiting.clear(shard); processing.set(shard) }\n\n    fun done(shard: Int, status: EventStatus) {\n        processing.clear(shard)\n        when (status) {\n            PROCESSED -> {\n                if (l.isInfoEnabled) l.info(\"shard: {}[{}] finished successfully\", id, shard)\n                awaiting.clear(shard)\n            }\n            ERROR -> {\n                if (l.isInfoEnabled) l.info(\"shard: {}[{}] finished with error\", id, shard)\n                awaiting.set(shard)\n            }\n            else -> throw IllegalArgumentException(\"invalid status value: $status\")\n        }\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/core/BucketsLoader.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.core\n\nimport com.google.common.util.concurrent.ListenableScheduledFuture\nimport com.walmartlabs.bigben.core.BucketManager.Companion.scheduler\nimport com.walmartlabs.bigben.entities.Bucket\nimport com.walmartlabs.bigben.extns.fetch\nimport com.walmartlabs.bigben.utils.commons.Props\nimport com.walmartlabs.bigben.utils.commons.TaskExecutor\nimport com.walmartlabs.bigben.utils.done\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.rootCause\nimport java.time.ZonedDateTime\nimport java.util.concurrent.TimeUnit.SECONDS\nimport java.util.concurrent.atomic.AtomicBoolean\nimport java.util.concurrent.atomic.AtomicReference\n\n/**\n * Created by smalik3 on 2/22/18\n */\nclass BucketsLoader(private val lookbackRange: Int, private val fetchSize: Int,\n                    private val bucketWidth: Int, private val bucketId: ZonedDateTime, private val consumer: (Bucket) -> Unit) : Runnable {\n\n    companion object {\n        private val l = logger<BucketsLoader>()\n    }\n\n    private val waitInterval = Props.int(\"buckets.background.load.wait.interval.seconds\")\n    private val runningJob = AtomicReference<ListenableScheduledFuture<*>>()\n    private val 
taskExecutor = TaskExecutor(setOf(Exception::class.java))\n\n    override fun run() {\n        l.info(\"starting the background load of buckets at a rate of {} buckets per {} seconds until {} buckets are loaded\", fetchSize, waitInterval, lookbackRange)\n        runningJob.set(scheduler.schedule({ load(0) }, 0, SECONDS))\n    }\n\n    private fun load(fromIndex: Int) {\n        if (fromIndex >= lookbackRange) {\n            if (l.isInfoEnabled) l.info(\"lookback range reached, bucket loading is finished\")\n        } else {\n            if (l.isInfoEnabled) l.info(\"initiating background load of buckets from index: {}\", fromIndex)\n            val currentBucketIndex = AtomicReference<Int>()\n            val atLeastOne = AtomicBoolean()\n            (1..fetchSize).forEach {\n                val bucketIndex = fromIndex + it\n                if (bucketIndex <= lookbackRange) {\n                    currentBucketIndex.set(bucketIndex)\n                    val bId = bucketId.minusSeconds((bucketIndex * bucketWidth).toLong())\n                    if (l.isDebugEnabled) l.debug(\"loading bucket: {}, failures will be retried {} times, every {} seconds\", bId, lookbackRange - bucketIndex + 1, bucketWidth)\n                    taskExecutor.async(\"bucket-load:$bId\", lookbackRange - bucketIndex + 1, bucketWidth, 1) { fetch<Bucket> { it.bucketId = bId } }\n                            .done({ l.error(\"error in loading bucket {}, system is giving up\", bId, it.rootCause()) }) {\n                                if (l.isDebugEnabled) l.debug(\"bucket {} loaded successfully\", bId)\n                                consumer(it.apply { atLeastOne.set(true) } ?: BucketManager.emptyBucket(bId))\n                            }\n                }\n            }\n            runningJob.set(scheduler.schedule({ load(currentBucketIndex.get()) }, (if (!atLeastOne.get()) 0 else waitInterval).toLong(), SECONDS))\n        }\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/core/ScheduleScanner.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.core\n\nimport com.google.common.collect.Iterators\nimport com.google.common.collect.LinkedHashMultimap\nimport com.google.common.collect.Multimap\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.google.common.util.concurrent.MoreExecutors.listeningDecorator\nimport com.hazelcast.core.IExecutorService\nimport com.hazelcast.core.Member\nimport com.walmartlabs.bigben.entities.EventStatus.ERROR\nimport com.walmartlabs.bigben.entities.EventStatus.PROCESSED\nimport com.walmartlabs.bigben.entities.ShardStatus\nimport com.walmartlabs.bigben.entities.ShardStatusList\nimport com.walmartlabs.bigben.extns.bucketize\nimport com.walmartlabs.bigben.extns.nextScan\nimport com.walmartlabs.bigben.extns.nowUTC\nimport com.walmartlabs.bigben.tasks.BulkShardTask\nimport com.walmartlabs.bigben.utils.*\nimport com.walmartlabs.bigben.utils.commons.Props.int\nimport com.walmartlabs.bigben.utils.hz.Hz\nimport com.walmartlabs.bigben.utils.hz.Service\nimport java.lang.Runtime.getRuntime\nimport java.time.Instant\nimport java.time.ZoneOffset.UTC\nimport java.time.ZonedDateTime\nimport java.time.temporal.ChronoUnit\nimport java.util.concurrent.Executors.newFixedThreadPool\nimport 
java.util.concurrent.ScheduledThreadPoolExecutor\nimport java.util.concurrent.TimeUnit.MILLISECONDS\nimport java.util.concurrent.TimeUnit.MINUTES\nimport java.util.concurrent.atomic.AtomicInteger\nimport java.util.concurrent.atomic.AtomicReference\nimport kotlin.system.exitProcess\n\n/**\n * Created by smalik3 on 2/23/18\n */\nclass ScheduleScanner(private val hz: Hz) : Service {\n\n    companion object {\n        internal const val BUCKET_CACHE = \"bucketCache\"\n        const val EVENT_SCHEDULER = \"event_scheduler\"\n\n        private val l = logger<ScheduleScanner>()\n\n        private val index = AtomicInteger()\n        private val shardIndexer = AtomicInteger()\n        private val scheduler = listeningDecorator(ScheduledThreadPoolExecutor(getRuntime().availableProcessors()) { r -> Thread(r, \"InternalScheduler#\" + index.getAndIncrement()) })\n        private val shardSubmitters = listeningDecorator(newFixedThreadPool(int(\"events.num.shard.submitters\")) { r -> Thread(r, \"ShardSubmitter#\" + shardIndexer.getAndIncrement()) })\n    }\n\n    private val isShutdown = AtomicReference(false)\n\n    private lateinit var bucketManager: BucketManager\n    private val bucketWidth = int(\"events.schedule.scan.interval.minutes\")\n    @Volatile\n    private lateinit var lastScan: ZonedDateTime\n\n    override val name: String = \"ScheduleScanner\"\n\n    override fun init() {\n        if (l.isInfoEnabled) l.info(\"initing the event scheduler\")\n        val lookbackRange = int(\"buckets.backlog.check.limit\")\n        bucketManager = BucketManager(lookbackRange + 1, 2 * bucketWidth * 60, bucketWidth * 60, hz)\n    }\n\n    override fun execute() {\n        if (l.isInfoEnabled) l.info(\"executing the EventScheduleScanner\")\n        val scanInterval = int(\"events.schedule.scan.interval.minutes\")\n        if (l.isInfoEnabled) l.info(\"calculating the next scan bucketId\")\n        val now = nowUTC()\n        val nextScan = nextScan(now, scanInterval)\n        val 
delay = ChronoUnit.MILLIS.between(now, nextScan)\n        val bucket = ZonedDateTime.ofInstant(Instant.ofEpochMilli(bucketize(now.toInstant().toEpochMilli(), scanInterval)), UTC)\n        lastScan = bucket.minusMinutes(bucketWidth.toLong())\n        if (l.isInfoEnabled) l.info(\"first-scan at: {}, for bucket: {}, next-scan at: {}, \" + \"initial-delay: {} ms, subsequent-scans: after every {} minutes\", now, bucket, nextScan, delay, scanInterval)\n        scheduler.scheduleAtFixedRate({ this.scan() }, delay, MILLISECONDS.convert(scanInterval.toLong(), MINUTES), MILLISECONDS)\n        if (l.isInfoEnabled) l.info(\"executing first time scan\")\n        scan()\n    }\n\n    private fun scan() {\n        try {\n            if (isShutdown.get()) {\n                if (l.isInfoEnabled) l.info(\"system is shutdown, no more schedules will be processed\")\n                return\n            }\n            val currentBucketId = lastScan.plusMinutes(bucketWidth.toLong())\n            lastScan = currentBucketId\n            scan(currentBucketId, bucketManager)\n        } catch (e: Throwable) {\n            l.error(\"error in running the scheduler\", e.rootCause())\n            if (e is Error) {\n                l.error(\"system will exit now\")\n                exitProcess(1)\n            }\n        }\n    }\n\n    fun scan(currentBucketId: ZonedDateTime, bucketManager: BucketManager) {\n        if (l.isInfoEnabled) l.info(\"scanning the schedule(s) for bucket: {}\", currentBucketId)\n\n        bucketManager.getProcessableShardsForOrBefore(currentBucketId).done({ l.error(\"error in processing bucket: {}\", currentBucketId, it!!.rootCause()) }) {\n            try {\n                if (it!!.isEmpty) {\n                    if (l.isInfoEnabled) l.info(\"nothing to schedule for bucket: {}\", currentBucketId)\n                    return@done\n                }\n                shardSubmitters.submit {\n                    try {\n                        if (l.isDebugEnabled) 
l.debug(\"{}, shards to be processed: => {}\", currentBucketId, it)\n                        calculateDistro(it).asMap().run {\n                            if (l.isInfoEnabled) l.info(\"{}, schedule distribution: => {}\", currentBucketId,\n                                    mapKeys { it.key.address.toString() }.mapValues { it.value.joinToString(\",\") { \"${it.first}[${it.second}]\" } }.toSortedMap())\n\n                            val iterator = Iterators.cycle<Member>(keys)\n                            val executorService = hz.hz.getExecutorService(EVENT_SCHEDULER)\n                            entries.map {\n                                { submitShards(executorService, iterator.next(), it.value, currentBucketId, bucketManager) }.retriable(\"shards-submit\", int(\"events.submit.max.retries\"),\n                                        int(\"events.submit.initial.delay\"),\n                                        int(\"events.submit.backoff.multiplier\")).transform { it!!.list }\n                            }.reduce().done({ l.error(\"schedule for bucket {} finished abnormally\", currentBucketId, it.rootCause()) }) {\n                                if (l.isDebugEnabled) l.debug(\"schedule for bucket {} finished normally => {}\", currentBucketId, it)\n                                val buckets = it!!.map { it!! }.flatten().filterNotNull().groupBy { it.bucketId!! 
}.mapValues { it.value.fold(false) { hasError, ss -> hasError || (ss.status == ERROR) } }\n                                if (l.isDebugEnabled) l.debug(\"bucket-scan: {}, final buckets with statuses to be persisted: {}\", currentBucketId, buckets)\n                                buckets.map { bucketManager.bucketProcessed(it.key, if (it.value) ERROR else PROCESSED) }.done({ l.error(\"bucket-scan: {}, failed to update the scan-status: {}\", currentBucketId, buckets.keys, it.rootCause()) }) {\n                                    if (l.isDebugEnabled) l.debug(\"bucket-scan: {}, successfully updated the scan-status: {}\", currentBucketId, buckets.keys)\n                                }\n                            }\n                        }\n                    } catch (e: Exception) {\n                        l.error(\"error in processing bucket: {}\", currentBucketId, e.rootCause())\n                    }\n                }\n            } catch (e: Exception) {\n                l.error(\"error in processing bucket: {}\", currentBucketId, e.rootCause())\n            }\n        }\n    }\n\n    private fun calculateDistro(shards: Multimap<ZonedDateTime, Int>): LinkedHashMultimap<Member, Pair<ZonedDateTime, Int>> {\n        val members = hz.hz.cluster.members.toMutableSet().apply { remove(hz.hz.cluster.localMember) }.toList().shuffled().toMutableList().apply { add(hz.hz.cluster.localMember) }\n        val entries = shards.entries().toList()\n        return LinkedHashMultimap.create<Member, Pair<ZonedDateTime, Int>>().apply {\n            val size = members.size\n            for (i in entries.indices) {\n                val e = entries[i]\n                put(members[i % size], e.key to e.value)\n            }\n        }\n    }\n\n    private fun submitShards(executorService: IExecutorService, member: Member, shardsData: Collection<Pair<ZonedDateTime, Int>>, bucket: ZonedDateTime, bucketManager: BucketManager): ListenableFuture<ShardStatusList> {\n        if 
(l.isDebugEnabled) l.debug(\"{}, submitting  for execution to member {}, shards: {}\", bucket, member.socketAddress, shardsData)\n        bucketManager.registerForProcessing(shardsData)\n        return executorService.submitToMember(BulkShardTask(shardsData), member).listenable().catching { ShardStatusList(shardsData.map { ShardStatus(it.first, it.second, ERROR) }) }.done({\n            l.error(\"{}, member {} finished abnormally for shards: {}\", bucket, member.socketAddress, shardsData, it)\n            shardsData.forEach { bucketManager.shardDone(it.first, it.second, ERROR) }\n        }) {\n            if (l.isDebugEnabled) l.debug(\"{}, member {} finished normally for shards: {}\", bucket, member.socketAddress, it)\n            it?.list?.forEach { bucketManager.shardDone(it!!.bucketId!!, it.shard!!, it.status!!) }\n        }\n    }\n\n    override fun destroy() {\n        if (l.isInfoEnabled) l.info(\"destroying the event scheduler\")\n    }\n}"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/core/StatusSyncer.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.core\n\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.entities.Bucket\nimport com.walmartlabs.bigben.entities.Event\nimport com.walmartlabs.bigben.entities.EventStatus\nimport com.walmartlabs.bigben.extns.nowUTC\nimport com.walmartlabs.bigben.extns.save\nimport com.walmartlabs.bigben.utils.done\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.retriable\nimport java.time.ZonedDateTime\n\ninternal class StatusSyncer {\n    companion object {\n        private val l = logger<StatusSyncer>()\n    }\n\n    fun syncBucket(bucketId: ZonedDateTime, status: EventStatus, setProcessedAt: Boolean, failedShards: Set<Int>?): ListenableFuture<Bucket> {\n        if (l.isDebugEnabled) l.debug(\"bucket {} is done, syncing status as {}\", bucketId, status)\n        return {\n            save<Bucket> {\n                it.bucketId = bucketId; it.status = status\n                if (setProcessedAt) it.processedAt = nowUTC()\n                if (failedShards != null && failedShards.isNotEmpty()) it.failedShards = failedShards else it.failedShards = null\n            }\n        }.retriable().done({ l.error(\"bucket {} could not be synced 
with status {}, after multiple retries\", bucketId, status, it) })\n        { if (l.isInfoEnabled) l.info(\"bucket {} is successfully synced as {}\", bucketId, status) }\n    }\n\n    fun syncShard(bucketId: ZonedDateTime, shard: Int, eventTime: ZonedDateTime, eventId: String, status: EventStatus, payload: String?): ListenableFuture<Event> {\n        if (l.isDebugEnabled) l.debug(\"shard {}[{}] is done, syncing status as {}, payload: {}\", bucketId, shard, status, payload)\n        return save<Event> { it.id = eventId; it.eventTime = eventTime; it.status = status; if (payload != null) it.payload = payload }.done({ l.error(\"shard {}[{}] could not be synced with status {}, after multiple retries\", bucketId, shard, status, it) }) {\n            if (l.isInfoEnabled) l.info(\"shard {}[{}] is successfully synced with status {}\", bucketId, shard, status)\n        }\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/entities/EntityProvider.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.entities\n\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.extns.epoch\nimport java.time.ZonedDateTime\n\n/**\n * Created by smalik3 on 2/25/18\n */\ninterface EntityProvider<T> {\n    fun selector(type: Class<T>): T\n    fun raw(selector: T): T\n    fun fetch(selector: T): ListenableFuture<T?>\n    fun kvs(selector: KV): ListenableFuture<List<KV>>\n    fun save(selector: T): ListenableFuture<T>\n    fun remove(selector: T): ListenableFuture<T>\n    fun unwrap(): Any?\n}\n\ninterface EventLoader {\n    fun load(bucketId: ZonedDateTime, shard: Int, fetchSize: Int, eventTime: ZonedDateTime = epoch(), eventId: String = \"\", context: Any? = null): ListenableFuture<Pair<Any?, List<Event>>>\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/entities/entities.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.entities\n\nimport com.hazelcast.nio.ObjectDataInput\nimport com.hazelcast.nio.ObjectDataOutput\nimport com.hazelcast.nio.serialization.IdentifiedDataSerializable\nimport com.walmartlabs.bigben.entities.EventDeliveryOption.FULL_EVENT\nimport com.walmartlabs.bigben.entities.Mode.UPSERT\nimport com.walmartlabs.bigben.hz.HzObjectFactory\nimport com.walmartlabs.bigben.hz.HzObjectFactory.ObjectId.SHARD_STATUS\nimport com.walmartlabs.bigben.hz.HzObjectFactory.ObjectId.SHARD_STATUS_LIST\nimport com.walmartlabs.bigben.utils.json\nimport java.time.Instant.ofEpochMilli\nimport java.time.ZoneOffset.UTC\nimport java.time.ZonedDateTime\nimport java.time.ZonedDateTime.ofInstant\n\n/**\n * Created by smalik3 on 2/21/18\n */\nenum class EventStatus { PROCESSED, ERROR, UN_PROCESSED, PROCESSING, TRIGGERED, EMPTY, REJECTED, ACCEPTED, UPDATED, DELETED }\n\ninterface Bucket : IdentifiedDataSerializable {\n    var bucketId: ZonedDateTime?\n    var status: EventStatus?\n    var count: Long?\n    var processedAt: ZonedDateTime?\n    var updatedAt: ZonedDateTime?\n    var failedShards: Set<Int>?\n}\n\ninterface Event : EventResponseMixin {\n    var eventTime: ZonedDateTime?\n    var bucketId: ZonedDateTime?\n    
var shard: Int?\n    var id: String?\n    var status: EventStatus?\n    var error: String?\n    var tenant: String?\n    var processedAt: ZonedDateTime?\n    var xrefId: String?\n    var payload: String?\n    var deliveryOption: EventDeliveryOption?\n}\n\ninterface EventLookup {\n    var tenant: String?\n    var xrefId: String?\n    var bucketId: ZonedDateTime?\n    var shard: Int?\n    var eventTime: ZonedDateTime?\n    var eventId: String?\n    var payload: String?\n}\n\ninterface KV {\n    var key: String?\n    var column: String?\n    var value: String?\n}\n\nabstract class Idso(private val objectId: HzObjectFactory.ObjectId) : IdentifiedDataSerializable {\n    override fun getFactoryId() = HzObjectFactory.BIGBEN_FACTORY_ID\n    override fun getId() = objectId.ordinal\n}\n\nabstract class IdsoCallable(private val objectId: HzObjectFactory.ObjectId) : IdentifiedDataSerializable {\n    override fun getFactoryId() = HzObjectFactory.BIGBEN_FACTORY_ID\n    override fun getId() = objectId.ordinal\n    override fun writeData(out: ObjectDataOutput?) {\n    }\n\n    override fun readData(`in`: ObjectDataInput?) {\n    }\n}\n\ndata class ShardStatus(var bucketId: ZonedDateTime? = null, var shard: Int? = null, var status: EventStatus? = null) : Idso(SHARD_STATUS) {\n    override fun writeData(out: ObjectDataOutput) {\n        out.writeLong(bucketId!!.toInstant().toEpochMilli())\n        out.writeInt(shard!!)\n        out.writeByte(status!!.ordinal)\n    }\n\n    override fun readData(`in`: ObjectDataInput) {\n        bucketId = ofInstant(ofEpochMilli(`in`.readLong()), UTC)\n        shard = `in`.readInt()\n        status = EventStatus.values()[`in`.readByte().toInt()]\n    }\n}\n\ndata class ShardStatusList(var list: List<ShardStatus?>? 
= null) : Idso(SHARD_STATUS_LIST) {\n    override fun writeData(out: ObjectDataOutput) {\n        out.writeInt(list!!.size)\n        list!!.forEach { out.writeObject(it) }\n    }\n\n    override fun readData(`in`: ObjectDataInput) {\n        list = (1..`in`.readInt()).map { `in`.readObject<ShardStatus>() }\n    }\n}\n\nenum class Mode { UPSERT, REMOVE }\nenum class EventDeliveryOption { FULL_EVENT, PAYLOAD_ONLY }\nopen class EventRequest(\n    var id: String? = null, var eventTime: String? = null, var tenant: String? = null,\n    var payload: String? = null, var mode: Mode = UPSERT, var deliveryOption: EventDeliveryOption? = FULL_EVENT\n) {\n    override fun toString() = \"EventRequest(${json()})\"\n}\n\nclass EventResponse(\n    id: String? = null, eventTime: String? = null, tenant: String? = null, mode: Mode = UPSERT, payload: String? = null,\n    var eventId: String? = null, var triggeredAt: String? = null, var eventStatus: EventStatus? = null,\n    var error: Error? = null, deliveryOption: EventDeliveryOption? = FULL_EVENT\n) :\n    EventRequest(id = id, eventTime = eventTime, tenant = tenant, mode = mode, payload = payload, deliveryOption = deliveryOption) {\n    override fun toString() = \"EventResponse(${json()})\"\n}\n\ndata class Error(val code: Int, val message: String?)\n\ninterface EventResponseMixin {\n    var eventResponse: EventResponse?\n}\n\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/extns/_api_response_extns.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.extns\n\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.utils.hz.Hz\nimport com.walmartlabs.bigben.utils.json\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.rootCause\nimport com.walmartlabs.bigben.utils.stackTraceAsString\nimport java.time.LocalDateTime\nimport java.time.ZoneOffset\nimport java.time.format.DateTimeParseException\n\n/**\n * Created by smalik3 on 6/29/18\n */\nprivate val l = logger(\"API\")\n\ndata class APIResponse(\n    val entity: Any, val status: Int = 200,\n    val headers: Map<String, MutableList<String>> = mutableMapOf()\n) {\n    fun header(name: String, value: String) = apply { (headers[name] ?: mutableListOf()).add(value) }\n}\n\nfun response(f: () -> Any?): APIResponse {\n    val begin = LocalDateTime.now()\n    val r = try {\n        f()?.run { this as? 
APIResponse ?: APIResponse(this, 200) }\n            ?: APIResponse(mapOf(\"status\" to \"not found\"), 404)\n    } catch (e: Exception) {\n        val t = e.rootCause()!!\n        l.error(\"error in processing request\", t)\n        val status = if (t is IllegalArgumentException || t is DateTimeParseException) 400 else 500\n        val message = \"please contact engineering team with the below error signature\"\n        APIResponse(\n            mutableMapOf(\"message\"\n                                 to (t.message?.let { \"\"\"${t.message}${if (status == 500) \" ($message)\" else \"\"}\"\"\" }\n                ?: \"Unexpected error, $message\")).apply {\n                if (status == 500) {\n                    this[\"error\"] = mapOf(\n                        \"stack\" to t.stackTraceAsString()!!,\n                        \"node\" to module<Hz>().hz.cluster.localMember.address.host,\n                        \"start_time\" to begin,\n                        \"duration\" to (LocalDateTime.now().toInstant(ZoneOffset.UTC).toEpochMilli() - begin.toInstant(ZoneOffset.UTC).toEpochMilli())\n                    ).json()\n                }\n            }, status\n        )\n    }\n    val end = LocalDateTime.now()\n    r.header(\"Start-Time\", begin.toString()).header(\"End-Time\", end.toString())\n        .header(\"Duration\", \"${end.toInstant(ZoneOffset.UTC).toEpochMilli() - begin.toInstant(ZoneOffset.UTC).toEpochMilli()} ms\")\n        .header(\"Node\", module<Hz>().hz.cluster.localMember.address.host)\n    return r\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/extns/_bigben_extns.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.extns\n\nimport com.walmartlabs.bigben.BigBen.entityProvider\nimport com.walmartlabs.bigben.entities.Event\nimport com.walmartlabs.bigben.entities.EventDeliveryOption\nimport com.walmartlabs.bigben.entities.EventDeliveryOption.FULL_EVENT\nimport com.walmartlabs.bigben.entities.EventDeliveryOption.PAYLOAD_ONLY\nimport com.walmartlabs.bigben.entities.EventRequest\nimport com.walmartlabs.bigben.entities.EventResponse\nimport java.time.ZonedDateTime\nimport java.util.*\nimport java.util.UUID.randomUUID\nimport java.util.stream.Collectors\n\nfun EventRequest.toResponse() = EventResponse(\n    tenant = tenant, eventTime = eventTime, id = id,\n    mode = mode, payload = payload, deliveryOption = deliveryOption\n)\n\nfun Event.toResponse() = eventResponse?.let { it }\n    ?: EventResponse(\n        id = xrefId, eventId = id, triggeredAt = processedAt?.toString(),\n        tenant = tenant, eventTime = eventTime?.toString(), payload = payload,\n        eventStatus = status, deliveryOption = deliveryOption(this)\n    )\n\nfun EventResponse.event() = entityProvider<Event>().let { it.raw(it.selector(Event::class.java)) }.also {\n    val t = ZonedDateTime.parse(triggeredAt)\n    it.tenant = tenant; 
it.xrefId = id; it.eventTime = ZonedDateTime.parse(eventTime)!!; it.payload = payload\n    it.id = eventId; it.bucketId = t.bucket(); it.processedAt = t; if (eventId == null) it.deliveryOption = deliveryOption\n}\n\nfun BitSet.toSet(): MutableSet<Int> = stream().boxed().collect(Collectors.toSet())!!\n\nfun eventId(req: EventRequest) = if (req.deliveryOption == FULL_EVENT) randomUUID().toString() else \"a-${randomUUID()}\"\n\nfun deliveryOption(event: Event): EventDeliveryOption? {\n    return when {\n        event.id != null -> if (event.id!!.startsWith(\"a-\")) PAYLOAD_ONLY else FULL_EVENT\n        event.deliveryOption != null -> event.deliveryOption\n        else -> null\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/extns/_do_extns.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.extns\n\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.BigBen.entityProvider\nimport com.walmartlabs.bigben.entities.KV\n\ninline fun <reified T> fetch(selector: (T) -> Unit): ListenableFuture<T?> {\n    return entityProvider<T>().let { it.fetch(it.selector(T::class.java).apply { selector(this) }) }\n}\n\ninline fun kvs(selector: (KV) -> Unit): ListenableFuture<List<KV>> {\n    return entityProvider<KV>().let { it.kvs(it.selector(KV::class.java).apply { selector(this) }) }\n}\n\ninline fun <reified T> save(selector: (T) -> Unit): ListenableFuture<T> {\n    return entityProvider<T>().let { it.save(it.selector(T::class.java).apply { selector(this) }) }\n}\n\ninline fun <reified T> remove(selector: (T) -> Unit): ListenableFuture<T> {\n    return entityProvider<T>().let { it.remove(it.selector(T::class.java).apply { selector(this) }) }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/extns/_time_extns.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.extns\n\nimport com.walmartlabs.bigben.utils.commons.Props\nimport java.time.Instant.EPOCH\nimport java.time.Instant.ofEpochMilli\nimport java.time.ZoneOffset.UTC\nimport java.time.ZonedDateTime\nimport java.time.ZonedDateTime.now\nimport java.time.ZonedDateTime.ofInstant\nimport java.time.temporal.ChronoField.MILLI_OF_SECOND\nimport java.time.temporal.ChronoField.MINUTE_OF_HOUR\nimport java.time.temporal.ChronoUnit.HOURS\nimport java.time.temporal.ChronoUnit.MINUTES\nimport java.util.*\n\n/**\n * Created by smalik3 on 2/21/18\n */\nfun bucketize(instant: Long, bucketWidth: Int): Long {\n    val epoch = ofEpochMilli(instant)\n    val hours = epoch.truncatedTo(HOURS).toEpochMilli()\n    val mins = epoch.truncatedTo(MINUTES).toEpochMilli()\n    val delta = (mins - hours) / (60 * 1000)\n    val boundary = delta / bucketWidth * bucketWidth\n    return hours + boundary * 60 * 1000\n}\n\nfun nextScan(zdt: ZonedDateTime, scanInterval: Int): ZonedDateTime {\n    val minZdt = zdt.toInstant().truncatedTo(MINUTES).atZone(UTC)\n    val currentMinutes = minZdt.get(MINUTE_OF_HOUR)\n    val offset = scanInterval - currentMinutes % scanInterval\n    return 
zdt.plusMinutes(offset.toLong()).withSecond(0).with(MILLI_OF_SECOND, 0).withNano(0)\n}\n\nfun utc(millis: Long): ZonedDateTime {\n    return ofInstant(ofEpochMilli(millis), UTC)\n}\n\nprivate val epochZdt = ofInstant(EPOCH, UTC)\n\nfun epoch(): ZonedDateTime {\n    return epochZdt\n}\n\nfun ZonedDateTime.toDate(): Date {\n    return Date(toInstant().toEpochMilli())\n}\n\nfun Date.toZdt(): ZonedDateTime {\n    return utc(time)\n}\n\nfun nowUTC(): ZonedDateTime {\n    return now(UTC)\n}\n\nfun ZonedDateTime?.bucket() = utc(bucketize(this?.toInstant()?.toEpochMilli() ?:\n        throw NullPointerException(\"null time\"), Props.int(\"events.schedule.scan.interval.minutes\")))\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/hz/BucketStore.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.hz\n\nimport com.hazelcast.core.MapStore\nimport com.walmartlabs.bigben.BigBen.entityProvider\nimport com.walmartlabs.bigben.entities.Bucket\nimport com.walmartlabs.bigben.extns.fetch\nimport com.walmartlabs.bigben.utils.logger\nimport com.walmartlabs.bigben.utils.reduce\nimport com.walmartlabs.bigben.utils.retriable\nimport java.time.ZonedDateTime\nimport java.util.concurrent.TimeUnit.MINUTES\n\n/**\n * Created by smalik3 on 3/3/18\n */\nclass BucketStore : MapStore<ZonedDateTime, Bucket> {\n\n    private val l = logger<BucketStore>()\n\n    private val provider = entityProvider<Bucket>()\n\n    override fun deleteAll(keys: MutableCollection<ZonedDateTime>?) {\n        throw UnsupportedOperationException(\"not supported\")\n    }\n\n    override fun load(key: ZonedDateTime): Bucket? {\n        return { fetch<Bucket> { it.bucketId = key } }.retriable(\"load-bucket: $key\").get(1, MINUTES)\n    }\n\n    override fun loadAll(keys: Collection<ZonedDateTime>): Map<ZonedDateTime, Bucket> {\n        return keys.map { k -> { fetch<Bucket> { it.bucketId = k } }.retriable(\"load-bucket: $k\") }.reduce().get(1, MINUTES).associate { it!!.bucketId!! 
to it }\n    }\n\n    override fun store(key: ZonedDateTime, value: Bucket) {\n        if (l.isDebugEnabled) l.debug(\"saving bucket: {}\", key);\n        { provider.save(value.apply { bucketId = key }) }.retriable(\"save-bucket: $key\").get(1, MINUTES)\n    }\n\n    override fun storeAll(map: Map<ZonedDateTime, Bucket>) {\n        if (l.isDebugEnabled) l.debug(\"saving buckets: {}\", map.keys)\n        map.entries.map { e -> { provider.save(e.run { value.bucketId = key; value }) }.retriable(\"save-bucket: ${e.key}\") }\n    }\n\n    override fun loadAllKeys(): Iterable<ZonedDateTime>? = null\n\n    override fun delete(key: ZonedDateTime) {\n        throw UnsupportedOperationException(\"not supported\")\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/hz/HzObjectFactory.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.hz\n\nimport com.hazelcast.nio.serialization.DataSerializableFactory\nimport com.hazelcast.nio.serialization.IdentifiedDataSerializable\nimport com.walmartlabs.bigben.BigBen.entityProvider\nimport com.walmartlabs.bigben.api.EventReceiver.Companion.CACHED_PROCESSOR\nimport com.walmartlabs.bigben.entities.Bucket\nimport com.walmartlabs.bigben.entities.ShardStatus\nimport com.walmartlabs.bigben.entities.ShardStatusList\nimport com.walmartlabs.bigben.hz.HzObjectFactory.ObjectId.*\nimport com.walmartlabs.bigben.tasks.BulkShardTask\nimport com.walmartlabs.bigben.tasks.ShutdownTask\nimport com.walmartlabs.bigben.tasks.StatusTask\n\n/**\n * Created by smalik3 on 3/10/18\n */\nclass HzObjectFactory : DataSerializableFactory {\n    companion object {\n        const val BIGBEN_FACTORY_ID = 1\n    }\n\n    enum class ObjectId {\n        EVENT_RECEIVER_ADD_EVENT,\n        BULK_EVENT_TASK,\n        SHUTDOWN_TASK,\n        CLUSTER_STATUS_TASK,\n        SHARD_STATUS,\n        SHARD_STATUS_LIST,\n        BUCKET\n    }\n\n    override fun create(typeId: Int): IdentifiedDataSerializable {\n        return when (values()[typeId]) {\n            EVENT_RECEIVER_ADD_EVENT -> CACHED_PROCESSOR\n            
BULK_EVENT_TASK -> BulkShardTask()\n            SHUTDOWN_TASK -> ShutdownTask()\n            SHARD_STATUS -> ShardStatus()\n            SHARD_STATUS_LIST -> ShardStatusList()\n            CLUSTER_STATUS_TASK -> StatusTask()\n            BUCKET -> entityProvider<Bucket>().let { it.raw(it.selector(Bucket::class.java)) }\n        }\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/modules.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben\n\nimport com.walmartlabs.bigben.api.EventReceiver\nimport com.walmartlabs.bigben.api.EventService\nimport com.walmartlabs.bigben.core.ScheduleScanner\nimport com.walmartlabs.bigben.utils.commons.Module\nimport com.walmartlabs.bigben.utils.commons.ModuleRegistry\nimport com.walmartlabs.bigben.utils.commons.Props\nimport com.walmartlabs.bigben.utils.hz.ClusterSingleton\nimport com.walmartlabs.bigben.utils.hz.Hz\nimport com.walmartlabs.bigben.utils.logger\n\n/**\n * Created by smalik3 on 9/18/18\n */\nobject EventModule : Module {\n\n    private val l = logger<EventModule>()\n\n    override fun init(registry: ModuleRegistry) {\n        val hz = registry.module<Hz>()\n        l.info(\"initializing event receiver\")\n        registry.register(EventReceiver(hz))\n        l.info(\"initializing event service\")\n        registry.register(EventService(hz, registry.module(), registry.module()))\n    }\n}\n\nobject SchedulerModule : Module {\n\n    private val l = logger<SchedulerModule>()\n\n    override fun init(registry: ModuleRegistry) {\n        val scheduler = ScheduleScanner(registry.module())\n        registry.register(scheduler)\n        if (Props.boolean(\"events.scheduler.enabled\")) {\n   
         l.info(\"initializing cluster singleton\")\n            ClusterSingleton(scheduler, registry.module())\n        } else {\n            l.info(\"skipping initializing cluster scheduler\")\n        }\n        l.info(\"Scheduler module initialized successfully\")\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/processors/no_ops.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.processors\n\nimport com.google.common.util.concurrent.Futures.immediateFuture\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.walmartlabs.bigben.entities.Event\nimport com.walmartlabs.bigben.entities.EventResponse\nimport com.walmartlabs.bigben.utils.Json\n\n/**\n * Created by smalik3 on 6/25/18\n */\nclass NoOpCustomClassProcessor(tenant: String, props: Json) : EventProcessor<Event> {\n    override fun invoke(t: Event): ListenableFuture<Event> {\n        return immediateFuture(t)\n    }\n}\n\nclass NoOpMessageProducerFactory : MessageProducerFactory {\n    override fun create(tenant: String, props: Json): MessageProducer {\n        return object : MessageProducer {\n            override fun produce(e: EventResponse): ListenableFuture<*> {\n                return immediateFuture(e)\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/processors/processors.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.processors\n\nimport com.google.common.base.Throwables.getStackTraceAsString\nimport com.google.common.cache.CacheBuilder\nimport com.google.common.net.HttpHeaders.ACCEPT\nimport com.google.common.net.HttpHeaders.CONTENT_TYPE\nimport com.google.common.net.MediaType.ANY_TYPE\nimport com.google.common.net.MediaType.JSON_UTF_8\nimport com.google.common.util.concurrent.Futures.immediateFailedFuture\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.google.common.util.concurrent.SettableFuture\nimport com.ning.http.client.AsyncCompletionHandler\nimport com.ning.http.client.AsyncHttpClient\nimport com.ning.http.client.Response\nimport com.walmartlabs.bigben.entities.Event\nimport com.walmartlabs.bigben.entities.EventDeliveryOption.FULL_EVENT\nimport com.walmartlabs.bigben.entities.EventDeliveryOption.PAYLOAD_ONLY\nimport com.walmartlabs.bigben.entities.EventResponse\nimport com.walmartlabs.bigben.entities.EventStatus.*\nimport com.walmartlabs.bigben.extns.deliveryOption\nimport com.walmartlabs.bigben.extns.kvs\nimport com.walmartlabs.bigben.extns.nowUTC\nimport com.walmartlabs.bigben.extns.toResponse\nimport com.walmartlabs.bigben.processors.ProcessorConfig.Type.*\nimport 
com.walmartlabs.bigben.utils.*\nimport com.walmartlabs.bigben.utils.commons.Module\nimport com.walmartlabs.bigben.utils.commons.ModuleRegistry\nimport com.walmartlabs.bigben.utils.commons.Props\nimport com.walmartlabs.bigben.utils.commons.Props.boolean\nimport com.walmartlabs.bigben.utils.commons.Props.int\nimport com.walmartlabs.bigben.utils.commons.Props.map\nimport java.io.Serializable\nimport java.lang.String.format\nimport java.util.concurrent.ConcurrentHashMap\nimport java.util.concurrent.ExecutionException\n\n\n/**\n * Created by smalik3 on 2/24/18\n */\ntypealias EventProcessor<T> = (t: T) -> ListenableFuture<T>\n\ndata class ProcessorConfig(var tenant: String? = null, var type: Type? = null, var props: Json? = null) : Serializable {\n    enum class Type {\n        MESSAGING, HTTP, CUSTOM_CLASS\n    }\n}\n\nobject ProcessorRegistry : EventProcessor<Event>, Module {\n\n    private val l = logger<ProcessorRegistry>()\n    private val ASYNC_HTTP_CLIENT = AsyncHttpClient()\n\n    private val configs: MutableMap<String, ProcessorConfig>\n    private val processorCache = CacheBuilder.newBuilder().build<String, EventProcessor<Event>>()\n\n    private val messageProducerFactory: MessageProducerFactory = Class.forName(Props.string(\"messaging.producer.factory.class\")).newInstance() as MessageProducerFactory\n\n    init {\n        if (l.isInfoEnabled) l.info(\"loading configs\")\n        configs = ConcurrentHashMap(kvs { it.key = \"tenants\" }.result { l.error(\"error in loading tenant configs\", it); throw it.rootCause()!! }\n                                        .map { ProcessorConfig::class.java.fromJson(it.value!!) }.associate { it.tenant!! 
to it })\n        if (l.isInfoEnabled) l.info(\"configs parsed: {}\", configs)\n    }\n\n    override fun init(registry: ModuleRegistry) {\n        if (boolean(\"events.processor.eager.loading\", false)) {\n            if (l.isInfoEnabled) l.info(\"creating the processors right away\")\n            configs.forEach { getOrCreate(it.value) }\n            if (l.isInfoEnabled) l.info(\"all processors created\")\n        } else\n            if (l.isInfoEnabled) l.info(\"processors will be created when required\")\n        l.info(\"ProcessorRegistry module loaded successfully\")\n    }\n\n    override fun invoke(e: Event): ListenableFuture<Event> {\n        try {\n            e.status = TRIGGERED\n            e.error = null\n            e.processedAt = nowUTC()\n\n            return { getOrCreate(configs[e.tenant]).invoke(e) }.retriable(\n                \"processor-e-id: ${e.id}\",\n                int(\"events.processor.max.retries\"), int(\"events.processor.initial.delay\"), int(\"events.processor.backoff.multiplier\")\n            ).apply {\n                transform {\n                    if (TRIGGERED == e.status) {\n                        e.status = e.error?.let { ERROR } ?: PROCESSED\n                    }\n                }.catching {\n                    l.error(\n                        \"error in processing event by processor after multiple retries, will be retried later if within \" +\n                                \"'buckets.backlog.check.limit', e-id: ${e.xrefId}\", it.rootCause()\n                    )\n                    e.status = ERROR\n                    e.error = it?.let { getStackTraceAsString(it) } ?: \"null error\"\n                }\n            }\n        } catch (ex: Exception) {\n            e.status = ERROR\n            e.error = getStackTraceAsString(ex.rootCause()!!)\n            return immediateFailedFuture<Event>(ex.rootCause()!!)\n        }\n    }\n\n    private fun getOrCreate(processorConfig: ProcessorConfig?): 
EventProcessor<Event> {\n        val eventContent = fun(e: Event): String {\n            return when (deliveryOption(e)) {\n                FULL_EVENT -> e.toResponse().json()\n                PAYLOAD_ONLY -> e.payload!!\n                else -> e.toResponse().json()\n            }\n        }\n        return try {\n            when (processorConfig?.type) {\n                MESSAGING -> processorCache.get(processorConfig.tenant!!) {\n                    if (l.isInfoEnabled) l.info(\"creating message processor for tenant: ${processorConfig.tenant}\")\n                    val mp = messageProducerFactory.create(processorConfig.tenant!!, map(\"kafka.producer.config\").mapKeys { it.key.removePrefix(\"kafka.producer.config.\") } + processorConfig.props!!)\n                    object : EventProcessor<Event> {\n                        override fun invoke(e: Event): ListenableFuture<Event> {\n                            if (l.isDebugEnabled) l.debug(\"tenant: ${processorConfig.tenant}, processing event: ${e.xrefId}\")\n                            return mp.produce(e.toResponse()).transform { if (l.isDebugEnabled) l.debug(\"tenant: ${processorConfig.tenant}, event produced successfully: ${e.xrefId}\"); e }\n                        }\n                    }\n                }\n                HTTP -> {\n                    processorCache.get(processorConfig.tenant!!) {\n                        {\n                            SettableFuture.create<Event>().apply {\n                                try {\n                                    val builder = ASYNC_HTTP_CLIENT.preparePost(processorConfig.props!![\"url\"].toString()).setBody(eventContent(it))\n                                    @Suppress(\"UNCHECKED_CAST\")\n                                    (processorConfig.props!![\"headers\"] as? 
Map<String, String>)?.let {\n                                        if (l.isDebugEnabled) l.debug(\"adding custom headers: {}\", it)\n                                        it.forEach { builder.setHeader(it.key, it.value) }\n                                    }\n                                    builder.setHeader(ACCEPT, ANY_TYPE.toString()).setHeader(CONTENT_TYPE, JSON_UTF_8.toString())\n                                    if (l.isDebugEnabled) l.debug(\"tenant: ${processorConfig.tenant}, processing event: ${it.xrefId}\")\n                                    builder.execute(object : AsyncCompletionHandler<Response>() {\n                                        override fun onCompleted(response: Response): Response {\n                                            val code = response.statusCode\n                                            if (code in 200..299 || code in 400..499) {\n                                                if (code < 400) {\n                                                    if (l.isDebugEnabled)\n                                                        l.debug(format(\"event processed successfully, response code: {}, response body: {}, event: {}\", code, response.responseBody, it.xrefId))\n                                                } else {\n                                                    l.warn(format(\"got a 'bad request' response with status code: {}, event will not be retried anymore, event: {}\", code, it.xrefId))\n                                                    it.error = response.responseBody\n                                                }\n                                                set(it)\n                                            } else {\n                                                setException(RuntimeException(response.responseBody))\n                                            }\n                                            return response\n                                        }\n\n                 
                       override fun onThrowable(t: Throwable) {\n                                            setException(t.rootCause()!!)\n                                        }\n                                    })\n                                } catch (ex: Exception) {\n                                    setException(ex.rootCause()!!)\n                                }\n                            }\n                        }\n                    }\n                }\n                CUSTOM_CLASS -> processorCache.get(processorConfig.tenant!!) custom@{\n                    try {\n                        @Suppress(\"UNCHECKED_CAST\")\n                        (Class.forName(processorConfig.props!![\"eventProcessorClass\"].toString()) as Class<EventProcessor<Event>>).run {\n                            constructors.forEach {\n                                when {\n                                    it.parameterCount == 0 -> return@custom it.newInstance() as EventProcessor<Event>\n                                    it.parameterCount == 2\n                                            && it.parameterTypes[0] == String::class.java\n                                            && Map::class.java.isAssignableFrom(it.parameterTypes[1]) ->\n                                        return@custom it.newInstance(processorConfig.tenant, processorConfig.props) as EventProcessor<Event>\n                                }\n                            }\n                            throw IllegalArgumentException(\n                                \"no suitable constructor found for custom processor: $this, \" +\n                                        \"either a no-args constructor or a constructor with parameters (String, Map<String, Object>) is required\"\n                            )\n                        }\n                    } catch (ex: Exception) {\n                        throw RuntimeException(ex.rootCause())\n                    }\n                }\n          
      null -> throw IllegalArgumentException(\"null processor type: $processorConfig\")\n            }\n        } catch (e: ExecutionException) {\n            throw RuntimeException(e)\n        }\n    }\n\n    fun register(config: ProcessorConfig?): ProcessorConfig? {\n        require(config != null) { \"null processor config\" }\n        require(config.tenant != null && config.tenant!!.trim().isNotEmpty()) { \"null or empty tenantId\" }\n        require(config.type != null) { \"null processor type\" }\n        require(config.props != null && !config.props!!.isEmpty()) { \"null or empty properties\" }\n        if (l.isInfoEnabled) l.info(\"registering new processor\")\n        val previous = configs.put(config.tenant!!, config)\n        processorCache.invalidate(config.tenant!!)\n        return previous\n    }\n\n    fun registeredTenants(): Set<String> {\n        return configs.keys.toSet()\n    }\n\n    fun registeredConfigs(): Map<String, ProcessorConfig> {\n        return configs\n    }\n}\n\ninterface MessageProducerFactory {\n    fun create(tenant: String, props: Json): MessageProducer\n}\n\ninterface MessageProducer {\n    fun produce(e: EventResponse): ListenableFuture<*>\n}\n\ninterface MessageProcessor {\n    fun init()\n}\n"
  },
  {
    "path": "lib/src/main/kotlin/com/walmartlabs/bigben/tasks/tasks.kt",
    "content": "/*-\n * #%L\n * BigBen:lib\n * =======================================\n * Copyright (C) 2016 - 2018 Walmart Inc.\n * =======================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n * \n *      http://www.apache.org/licenses/LICENSE-2.0\n * \n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * #L%\n */\npackage com.walmartlabs.bigben.tasks\n\nimport com.google.common.base.Throwables.getStackTraceAsString\nimport com.google.common.util.concurrent.AsyncCallable\nimport com.google.common.util.concurrent.Futures.immediateFuture\nimport com.google.common.util.concurrent.ListenableFuture\nimport com.google.common.util.concurrent.MoreExecutors.listeningDecorator\nimport com.hazelcast.core.HazelcastInstance\nimport com.hazelcast.core.HazelcastInstanceAware\nimport com.hazelcast.nio.ObjectDataInput\nimport com.hazelcast.nio.ObjectDataOutput\nimport com.hazelcast.nio.serialization.IdentifiedDataSerializable\nimport com.walmartlabs.bigben.BigBen.entityProvider\nimport com.walmartlabs.bigben.BigBen.module\nimport com.walmartlabs.bigben.entities.*\nimport com.walmartlabs.bigben.entities.EventStatus.ERROR\nimport com.walmartlabs.bigben.entities.EventStatus.PROCESSED\nimport com.walmartlabs.bigben.extns.epoch\nimport com.walmartlabs.bigben.hz.HzObjectFactory\nimport com.walmartlabs.bigben.hz.HzObjectFactory.ObjectId.BULK_EVENT_TASK\nimport com.walmartlabs.bigben.hz.HzObjectFactory.ObjectId.SHUTDOWN_TASK\nimport com.walmartlabs.bigben.processors.EventProcessor\nimport com.walmartlabs.bigben.utils.*\nimport 
com.walmartlabs.bigben.utils.commons.Props.int\nimport com.walmartlabs.bigben.utils.hz.ClusterSingleton\nimport java.lang.System.currentTimeMillis\nimport java.time.Instant\nimport java.time.ZoneOffset.UTC\nimport java.time.ZonedDateTime\nimport java.util.*\nimport java.util.concurrent.Callable\nimport java.util.concurrent.ScheduledThreadPoolExecutor\nimport java.util.concurrent.ThreadFactory\nimport java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy\nimport java.util.concurrent.TimeUnit.MILLISECONDS\nimport java.util.concurrent.atomic.AtomicInteger\nimport kotlin.math.max\nimport kotlin.math.min\n\n/**\n * Created by smalik3 on 2/23/18\n */\nclass BulkShardTask(private var shards: Collection<Pair<ZonedDateTime, Int>>? = null) : Callable<ShardStatusList>, IdentifiedDataSerializable, HazelcastInstanceAware, Idso(BULK_EVENT_TASK) {\n\n    companion object {\n        private val l = logger<BulkShardTask>()\n        private val NO_OP = immediateFuture<List<ShardStatus>>(ArrayList())\n    }\n\n    private lateinit var hz: HazelcastInstance\n\n    override fun call(): ShardStatusList {\n        try {\n            return ShardStatusList(execute().get())\n        } catch (e: Exception) {\n            l.error(\"error in processing events\", e.rootCause())\n            throw RuntimeException(e)\n        }\n    }\n\n    private fun execute(): ListenableFuture<List<ShardStatus>> {\n        val shards = shards!!\n        if (shards.isEmpty()) return NO_OP\n        if (l.isDebugEnabled) l.debug(\"{}, executing bulk event task for buckets/shards on node: {}\", shards.map { \"${it.first}/${it.second}\" }, hz.cluster.localMember.socketAddress)\n        val fetchSizeHint = int(\"events.tasks.max.events.in.memory\") / shards.size\n        if (l.isInfoEnabled) l.info(\"starting processing of ${shards.sortedBy { it.first }}\")\n        return shards.map { s ->\n            try {\n                ShardTask(s, fetchSizeHint, module(), module()).call().done(\n                        
{ l.error(\"error in executing shard: bucket: {}, shard: {}\", s.first, s.second, it.rootCause()) }) {\n                    if (l.isInfoEnabled) l.info(\"shard processed, bucket: {}, shard: {}\", s.first, s.second)\n                }.catching {\n                    l.error(\"error in executing shard, returning an ERROR status bucket: {}, shard: {}\", s.first, s.second, it.rootCause())\n                    ShardStatus(s.first, s.second, ERROR)\n                }\n            } catch (e: Exception) {\n                l.error(\"error in submitting shard for execution: bucket: {}, shard: {}\", s.first, s.second, e.rootCause())\n                immediateFuture(ShardStatus(s.first, s.second, ERROR))\n            }\n        }.reduce()\n    }\n\n    override fun writeData(out: ObjectDataOutput) {\n        out.writeInt(shards!!.size)\n        shards!!.forEach {\n            out.writeLong(it.first.toInstant().toEpochMilli())\n            out.writeInt(it.second)\n        }\n    }\n\n    override fun readData(`in`: ObjectDataInput) {\n        shards = (1..`in`.readInt()).map { ZonedDateTime.ofInstant(Instant.ofEpochMilli(`in`.readLong()), UTC) to `in`.readInt() }.toList()\n    }\n\n    override fun setHazelcastInstance(hazelcastInstance: HazelcastInstance) {\n        this.hz = hazelcastInstance\n    }\n}\n\nclass ShardTask(private val p: Pair<ZonedDateTime, Int>, fetchSizeHint: Int,\n                private val processor: EventProcessor<Event>, private val loader: EventLoader) : Callable<ListenableFuture<ShardStatus>> {\n\n    companion object {\n        private val l = logger<ShardTask>()\n\n        private val index = AtomicInteger()\n        private val scheduler = listeningDecorator(ScheduledThreadPoolExecutor(\n                int(\"events.tasks.scheduler.worker.threads\"), ThreadFactory { Thread(it, \"event-processor#\" + index.getAndIncrement()) }, CallerRunsPolicy()))\n    }\n\n    private val executionKey = \"${p.first}[${p.second}]\"\n    private val fetchSize = 
max(10, min(fetchSizeHint, 400))\n\n    override fun call(): ListenableFuture<ShardStatus> {\n        if (l.isDebugEnabled) l.debug(\"{}, processing shard with fetch size: {}\", executionKey, fetchSize)\n        return loadAndProcess(epoch(), \"\", null).transform {\n            it!!.second.fold(false) { b, e -> b || e.status == ERROR }.run {\n                if (l.isDebugEnabled) {\n                    if (this) l.debug(\"{}, errors in processing shard\", executionKey)\n                    else l.debug(\"{}, shard processed successfully\", executionKey)\n                }\n                ShardStatus(p.first, p.second, if (this) ERROR else PROCESSED)\n            }\n        }\n    }\n\n    private fun loadAndProcess(eventTime: ZonedDateTime, eventId: String, context: Any?): ListenableFuture<Pair<Any?, List<Event>>> {\n        return loader.load(p.first, p.second, fetchSize, eventTime, eventId, context).transformAsync { rp ->\n            val events = rp!!.second\n            if (events.isEmpty()) immediateFuture(rp.first to events)\n            else events.filter { it.status != PROCESSED }.map { e ->\n                schedule(e).done(\n                    { l.error(\"{}/{}/{} event has error in processing\", executionKey, e.eventTime, e.id, it.rootCause()) }\n                ) { if (l.isDebugEnabled) l.debug(\"{}/{}/{} event is processed successfully\", executionKey, e.eventTime, e.id) }\n            }.reduce().transformAsync {\n                if (events.size >= fetchSize)\n                    loadAndProcess(events.last().eventTime!!, events.last().id!!, rp.first)\n                else immediateFuture(rp.first to events)\n            }\n        }\n    }\n\n    private fun schedule(e: Event): ListenableFuture<Event> {\n        val delay = e.eventTime!!.toInstant().toEpochMilli() - currentTimeMillis()\n        return if (delay <= 0) {\n            if (l.isDebugEnabled) l.debug(\"{}, event {} time has expired, processing immediately\", executionKey, e.id)\n          
  process(e).transformAsync { save(e) }\n        } else {\n            if (l.isDebugEnabled) l.debug(\"{}, scheduling event '{}' after delay {}, at {}\", executionKey, e.id, delay, e.eventTime!!)\n            AsyncCallable { processor(e) }.scheduleAsync(delay, MILLISECONDS, scheduler).transformAsync { save(it!!) }\n        }\n    }\n\n    private fun process(e: Event): ListenableFuture<Event> {\n        return try {\n            if (l.isDebugEnabled) l.debug(\"{}, processing event: {}\", executionKey, e.id)\n            processor.invoke(e).apply {\n                transform { if (l.isDebugEnabled) l.debug(\"{}, processed event: {}\", executionKey, e.id) }.catching { ex ->\n                    l.error(\"{}, error in processing event, marking it {}\", executionKey, ERROR, ex.rootCause())\n                    e.status?.let { e.status = ERROR; e.error = getStackTraceAsString(ex!!) }\n                }\n            }\n        } catch (t: Throwable) {\n            l.error(\"{}, error in processing event: {}\", executionKey, e.id, t.rootCause())\n            e.status = ERROR\n            e.error = getStackTraceAsString(t.rootCause()!!)\n            immediateFuture(e)\n        }\n    }\n\n    private fun save(e: Event): ListenableFuture<Event> {\n        if (l.isDebugEnabled) l.debug(\"{}, saving event: {} to the DB, the status is '{}'\", executionKey, e.id, e.status)\n        return entityProvider<Event>().save(e)\n    }\n}\n\ninternal class ShutdownTask : IdsoCallable(SHUTDOWN_TASK), Callable<Boolean> {\n    override fun call(): Boolean {\n        TODO()\n    }\n}\n\ninternal class StatusTask(private var serviceName: String? 
= null) : Idso(HzObjectFactory.ObjectId.CLUSTER_STATUS_TASK), Callable<String> {\n    override fun call() = if (ClusterSingleton.ACTIVE_SERVICES.contains(serviceName)) \"Master\" else \"Slave\"\n    override fun writeData(out: ObjectDataOutput) = out.writeUTF(serviceName)\n    override fun readData(`in`: ObjectDataInput) = `in`.let { serviceName = it.readUTF() }\n}\n\n"
  },
  {
    "path": "lib/src/main/resources/hz.template.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!--\n  #%L\n  BigBen:lib\n  =======================================\n  Copyright (C) 2016 - 2018 Walmart Inc.\n  =======================================\n  Licensed under the Apache License, Version 2.0 (the \"License\");\n  you may not use this file except in compliance with the License.\n  You may obtain a copy of the License at\n  \n       http://www.apache.org/licenses/LICENSE-2.0\n  \n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n  #L%\n  -->\n\n<!-- DO NOT INDENT THIS MACRO -->\n<hazelcast xsi:schemaLocation=\"http://www.hazelcast.com/schema/config hazelcast-config-3.5.xsd\"\n           xmlns=\"http://www.hazelcast.com/schema/config\"\n           xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n    <properties>\n        <property name=\"hazelcast.logging.type\">slf4j</property>\n    </properties>\n    <group>\n        <name>${group.name bigben}</name>\n        <password>${group.password bigben-pass}</password>\n    </group>\n    <management-center>${management.url http://localhost:8080/mancenter}</management-center>\n    <network>\n        <port auto-increment=\"${network.autoIncrementPort false}\">${network.port 5701}</port>\n        <outbound-ports>\n            <ports>0</ports>\n        </outbound-ports>\n        <join>\n            <multicast enabled=\"false\"/>\n            <tcp-ip enabled=\"true\">\n                <members>${network.members 127.0.0.1}</members>\n            </tcp-ip>\n        </join>\n    </network>\n    <map name=\"bucketCache\">\n        <in-memory-format>OBJECT</in-memory-format>\n        <map-store initial-mode=\"LAZY\">\n            
<class-name>com.walmartlabs.bigben.hz.BucketStore</class-name>\n            <write-delay-seconds>${map.store.writeDelay 60}</write-delay-seconds>\n            <write-batch-size>${map.store.batchSize 1000}</write-batch-size>\n            <write-coalescing>${map.store.writeCoalescing true}</write-coalescing>\n        </map-store>\n        <backup-count>${map.backupCount 1}</backup-count>\n        <async-backup-count>${map.asyncBackupCount 0}</async-backup-count>\n        <time-to-live-seconds>${map.ttl 0}</time-to-live-seconds>\n        <max-idle-seconds>${map.maxIdleSeconds 0}</max-idle-seconds>\n        <eviction-policy>${map.evictionPolicy LRU}</eviction-policy>\n        <max-size policy=\"${map.sizePolicy PER_NODE}\">${map.MaxSize 1000000}</max-size>\n        <eviction-percentage>${map.evictionPercentage 25}</eviction-percentage>\n        <min-eviction-check-millis>${map.evictionCheckMillis 2000}</min-eviction-check-millis>\n        <merge-policy>${map.mergePolicy com.hazelcast.map.merge.LatestUpdateMapMergePolicy}</merge-policy>\n    </map>\n    <map name=\"crons\">\n        <in-memory-format>OBJECT</in-memory-format>\n        <map-store initial-mode=\"EAGER\">\n            <class-name>com.walmartlabs.bigben.cron.CronMapStore</class-name>\n        </map-store>\n        <backup-count>${cron.backupCount 1}</backup-count>\n        <async-backup-count>${cron.async.backupCount 0}</async-backup-count>\n        <time-to-live-seconds>0</time-to-live-seconds>\n        <eviction-policy>NONE</eviction-policy>\n    </map>\n    <serialization>\n        <data-serializable-factories>\n            <data-serializable-factory factory-id=\"1\">com.walmartlabs.bigben.hz.HzObjectFactory\n            </data-serializable-factory>\n        </data-serializable-factories>\n    </serialization>\n</hazelcast>\n"
  },
  {
    "path": "pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n    <parent>\n        <groupId>com.walmartlabs</groupId>\n        <artifactId>walmartlabs-pom</artifactId>\n        <version>1</version>\n    </parent>\n    <groupId>com.walmartlabs.bigben</groupId>\n    <artifactId>bigben</artifactId>\n    <packaging>pom</packaging>\n    <version>1.0.7-SNAPSHOT</version>\n    <modules>\n        <module>commons</module>\n        <module>lib</module>\n        <module>cron</module>\n        <module>cassandra</module>\n        <module>kafka</module>\n        <module>app</module>\n    </modules>\n    <name>BigBen:parent</name>\n    <url>http://walmartlabs.com</url>\n\n    <properties>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n        <kotlin.version>1.3.11</kotlin.version>\n        <slf4j-api.version>1.7.25</slf4j-api.version>\n        <guava.version>24.1.1-jre</guava.version>\n        <jackson.version>2.10.0.pr1</jackson.version>\n        <hazelcast.version>3.8.6</hazelcast.version>\n        <async-http-client.version>1.9.31</async-http-client.version>\n        <maven-compiler-plugin.version>3.5.1</maven-compiler-plugin.version>\n        <jackson-module-kotlin.version>2.9.4.1</jackson-module-kotlin.version>\n        <commons-lang3.version>3.7</commons-lang3.version>\n        <commons-text.version>1.2</commons-text.version>\n        <ktor.version>1.3.0</ktor.version>\n        <skipTests>true</skipTests>\n    </properties>\n\n    <repositories>\n        <repository>\n            <id>jcenter</id>\n            <url>http://jcenter.bintray.com</url>\n        </repository>\n    </repositories>\n\n    <licenses>\n        <license>\n            <name>Apache License, Version 2.0</name>\n            
<url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>\n            <distribution>repo</distribution>\n        </license>\n    </licenses>\n\n    <developers>\n        <developer>\n            <id>wmt</id>\n            <name>WalmartLabs Open Source Developers</name>\n            <email>opensource@walmartlabs.com</email>\n            <roles>\n                <role>Contributor</role>\n            </roles>\n            <timezone>-8</timezone>\n        </developer>\n        <developer>\n            <id>smalik3</id>\n            <name>Sandeep Malik</name>\n            <email>smalik@walmartlabs.com</email>\n            <roles>\n                <role>Project Lead</role>\n            </roles>\n            <timezone>-8</timezone>\n        </developer>\n        <developer>\n            <id>sandeep.malik</id>\n            <name>Sandeep Malik</name>\n            <email>sandeep.malik@gmail.com</email>\n            <roles>\n                <role>Project Lead</role>\n            </roles>\n            <timezone>-8</timezone>\n        </developer>\n    </developers>\n\n    <dependencyManagement>\n        <dependencies>\n            <dependency>\n                <groupId>com.walmartlabs.bigben</groupId>\n                <artifactId>bigben-commons</artifactId>\n                <version>1.0.7-SNAPSHOT</version>\n            </dependency>\n            <dependency>\n                <groupId>com.walmartlabs.bigben</groupId>\n                <artifactId>bigben-lib</artifactId>\n                <version>1.0.7-SNAPSHOT</version>\n            </dependency>\n            <dependency>\n                <groupId>com.walmartlabs.bigben</groupId>\n                <artifactId>bigben-cassandra</artifactId>\n                <version>1.0.7-SNAPSHOT</version>\n            </dependency>\n            <dependency>\n                <groupId>com.walmartlabs.bigben</groupId>\n                <artifactId>bigben-kafka</artifactId>\n                <version>1.0.7-SNAPSHOT</version>\n            
</dependency>\n            <dependency>\n                <groupId>com.walmartlabs.bigben</groupId>\n                <artifactId>bigben-cron</artifactId>\n                <version>1.0.7-SNAPSHOT</version>\n            </dependency>\n            <dependency>\n                <groupId>org.slf4j</groupId>\n                <artifactId>slf4j-api</artifactId>\n                <version>${slf4j-api.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>com.google.guava</groupId>\n                <artifactId>guava</artifactId>\n                <version>${guava.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>com.fasterxml.jackson.core</groupId>\n                <artifactId>jackson-databind</artifactId>\n                <version>${jackson.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>com.fasterxml.jackson.module</groupId>\n                <artifactId>jackson-module-kotlin</artifactId>\n                <version>${jackson-module-kotlin.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>com.hazelcast</groupId>\n                <artifactId>hazelcast</artifactId>\n                <version>${hazelcast.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>com.ning</groupId>\n                <artifactId>async-http-client</artifactId>\n                <version>${async-http-client.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>org.apache.commons</groupId>\n                <artifactId>commons-lang3</artifactId>\n                <version>${commons-lang3.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>org.apache.commons</groupId>\n                <artifactId>commons-text</artifactId>\n                <version>${commons-text.version}</version>\n            
</dependency>\n            <dependency>\n                <groupId>org.jetbrains.kotlin</groupId>\n                <artifactId>kotlin-reflect</artifactId>\n                <version>${kotlin.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>org.testng</groupId>\n                <artifactId>testng</artifactId>\n                <version>6.14.3</version>\n                <scope>test</scope>\n            </dependency>\n            <dependency>\n                <groupId>io.ktor</groupId>\n                <artifactId>ktor-server-core</artifactId>\n                <version>${ktor.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>io.ktor</groupId>\n                <artifactId>ktor-server-netty</artifactId>\n                <version>${ktor.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>io.netty</groupId>\n                <artifactId>netty-codec-http2</artifactId>\n                <version>4.1.24.Final</version>\n            </dependency>\n            <dependency>\n                <groupId>io.ktor</groupId>\n                <artifactId>ktor-jackson</artifactId>\n                <version>${ktor.version}</version>\n            </dependency>\n            <dependency>\n                <groupId>io.github.microutils</groupId>\n                <artifactId>kotlin-logging</artifactId>\n                <version>1.6.22</version>\n            </dependency>\n            <dependency>\n                <groupId>io.ktor</groupId>\n                <artifactId>ktor-client-core</artifactId>\n                <version>${ktor.version}</version>\n                <scope>test</scope>\n            </dependency>\n            <dependency>\n                <groupId>io.ktor</groupId>\n                <artifactId>ktor-client-apache</artifactId>\n                <version>${ktor.version}</version>\n                <scope>test</scope>\n            </dependency>\n     
   </dependencies>\n    </dependencyManagement>\n\n    <dependencies>\n        <dependency>\n            <groupId>org.jetbrains.kotlin</groupId>\n            <artifactId>kotlin-stdlib-jdk8</artifactId>\n            <version>${kotlin.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>org.jetbrains.kotlin</groupId>\n            <artifactId>kotlin-test</artifactId>\n            <version>${kotlin.version}</version>\n            <scope>test</scope>\n        </dependency>\n    </dependencies>\n\n    <build>\n        <pluginManagement>\n            <plugins>\n                <plugin>\n                    <artifactId>kotlin-maven-plugin</artifactId>\n                    <groupId>org.jetbrains.kotlin</groupId>\n                    <version>${kotlin.version}</version>\n                    <executions>\n                        <execution>\n                            <id>compile</id>\n                            <goals>\n                                <goal>compile</goal>\n                            </goals>\n                            <configuration>\n                                <jvmTarget>1.8</jvmTarget>\n                                <sourceDirs>\n                                    <sourceDir>${project.basedir}/src/main/kotlin</sourceDir>\n                                    <sourceDir>${project.basedir}/src/main/java</sourceDir>\n                                </sourceDirs>\n                            </configuration>\n                        </execution>\n                        <execution>\n                            <id>test-compile</id>\n                            <goals>\n                                <goal>test-compile</goal>\n                            </goals>\n                            <configuration>\n                                <jvmTarget>1.8</jvmTarget>\n                                <sourceDirs>\n                                    <sourceDir>${project.basedir}/src/test/kotlin</sourceDir>\n           
                         <sourceDir>${project.basedir}/src/test/java</sourceDir>\n                                </sourceDirs>\n                            </configuration>\n                        </execution>\n                    </executions>\n                </plugin>\n                <plugin>\n                    <groupId>org.apache.maven.plugins</groupId>\n                    <artifactId>maven-compiler-plugin</artifactId>\n                    <version>${maven-compiler-plugin.version}</version>\n                    <executions>\n                        <!-- Replacing default-compile as it is treated specially by maven -->\n                        <execution>\n                            <id>default-compile</id>\n                            <phase>none</phase>\n                        </execution>\n                        <!-- Replacing default-testCompile as it is treated specially by maven -->\n                        <execution>\n                            <id>default-testCompile</id>\n                            <phase>none</phase>\n                        </execution>\n                        <execution>\n                            <id>java-compile</id>\n                            <phase>compile</phase>\n                            <goals>\n                                <goal>compile</goal>\n                            </goals>\n                        </execution>\n                        <execution>\n                            <id>java-test-compile</id>\n                            <phase>test-compile</phase>\n                            <goals>\n                                <goal>testCompile</goal>\n                            </goals>\n                        </execution>\n                    </executions>\n                    <configuration>\n                        <source>1.8</source>\n                        <target>1.8</target>\n                    </configuration>\n                </plugin>\n                <plugin>\n                    
<groupId>org.apache.maven.plugins</groupId>\n                    <artifactId>maven-javadoc-plugin</artifactId>\n                    <version>2.10.1</version>\n                    <configuration>\n                        <javadocExecutable>${java.home}/../bin/javadoc</javadocExecutable>\n                    </configuration>\n                    <executions>\n                        <execution>\n                            <id>attach-javadocs</id>\n                            <goals>\n                                <goal>jar</goal>\n                            </goals>\n                            <configuration>\n                                <additionalparam>-Xdoclint:none</additionalparam>\n                            </configuration>\n                        </execution>\n                    </executions>\n                </plugin>\n                <plugin>\n                    <groupId>org.apache.maven.plugins</groupId>\n                    <artifactId>maven-surefire-plugin</artifactId>\n                    <version>2.22.0</version>\n                    <configuration>\n                        <!--<forkMode>never</forkMode>-->\n                        <skipTests>${skipTests}</skipTests>\n                        <systemPropertyVariables>\n                            <buildDirectory>${project.build.directory}</buildDirectory>\n                        </systemPropertyVariables>\n                    </configuration>\n                </plugin>\n                <plugin>\n                    <groupId>org.apache.maven.plugins</groupId>\n                    <artifactId>maven-dependency-plugin</artifactId>\n                    <version>3.0.2</version>\n                    <executions>\n                        <execution>\n                            <id>install</id>\n                            <phase>install</phase>\n                            <goals>\n                                <goal>analyze-only</goal>\n                            </goals>\n                    
    </execution>\n                    </executions>\n                </plugin>\n                <plugin>\n                    <groupId>org.apache.maven.plugins</groupId>\n                    <artifactId>maven-release-plugin</artifactId>\n                    <version>2.5.3</version>\n                    <configuration>\n                        <localCheckout>true</localCheckout>\n                    </configuration>\n                </plugin>\n                <plugin>\n                    <groupId>org.codehaus.mojo</groupId>\n                    <artifactId>exec-maven-plugin</artifactId>\n                    <version>1.4.0</version>\n                    <executions>\n                        <execution>\n                            <goals>\n                                <goal>exec</goal>\n                            </goals>\n                        </execution>\n                    </executions>\n                    <configuration>\n                        <executable>maven</executable>\n                    </configuration>\n                </plugin>\n                <plugin>\n                    <groupId>org.codehaus.mojo</groupId>\n                    <artifactId>license-maven-plugin</artifactId>\n                    <version>1.16</version>\n                    <configuration>\n                        <inceptionYear>2016</inceptionYear>\n                        <copyrightOwners>Walmart Inc.</copyrightOwners>\n                        <addJavaLicenseAfterPackage>false</addJavaLicenseAfterPackage>\n                        <organizationName>Walmart Inc.</organizationName>\n                        <licenseFile>${project.basedir}/LICENSE.txt</licenseFile>\n                        <canUpdateCopyright>true</canUpdateCopyright>\n                        <canUpdateDescription>true</canUpdateDescription>\n                        <canUpdateLicense>false</canUpdateLicense>\n                        <licenseName>apache_v2</licenseName>\n                        
<sectionDelimiter>=======================================</sectionDelimiter>\n                        <roots>\n                            <root>src/</root>\n                        </roots>\n                    </configuration>\n                    <executions>\n                        <execution>\n                            <id>first</id>\n                            <goals>\n                                <goal>update-file-header</goal>\n                                <goal>update-project-license</goal>\n                            </goals>\n                            <phase>process-sources</phase>\n                        </execution>\n                    </executions>\n                </plugin>\n            </plugins>\n        </pluginManagement>\n    </build>\n\n    <scm>\n        <connection>scm:git:https://github.com/walmartlabs/bigben.git</connection>\n        <developerConnection>scm:git:https://github.com/walmartlabs/bigben.git</developerConnection>\n        <tag>HEAD</tag>\n        <url>https://github.com/walmartlabs/bigben</url>\n    </scm>\n\n</project>\n"
  },
  {
    "path": "run_bigben_standalone.sh",
    "content": "#!/usr/bin/env bash\necho Building BigBen\nmvn clean install\ncd app/target\necho Starting BigBen\njava -jar bigben.jar"
  }
]